[ 556.139133] env[62684]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=62684) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 556.139543] env[62684]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=62684) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 556.139658] env[62684]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=62684) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 556.140056] env[62684]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 556.246515] env[62684]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62684) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:383}}
[ 556.255724] env[62684]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.014s {{(pid=62684) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:421}}
[ 556.859912] env[62684]: INFO nova.virt.driver [None req-c1c59c41-81c1-41bc-b784-a7220f57ad62 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 556.929429] env[62684]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 556.929612] env[62684]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 556.929680] env[62684]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62684) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 560.177027] env[62684]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-f66bd1b8-0cad-4332-b70e-f23fc7538aaa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 560.193013] env[62684]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62684) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 560.193143] env[62684]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-5e77646a-a5b8-476c-bb44-cbef3b76879d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 560.221647] env[62684]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 30285.
[ 560.221873] env[62684]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.292s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 560.222387] env[62684]: INFO nova.virt.vmwareapi.driver [None req-c1c59c41-81c1-41bc-b784-a7220f57ad62 None None] VMware vCenter version: 7.0.3
[ 560.226189] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-995f16d8-2a5f-4d5c-acfb-b14b8b61a8e6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 560.248375] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b320eda-d1e0-483d-8abc-f09f5672d454 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 560.255868] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b85363ff-7afe-4a90-abbe-1adc3e882761 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 560.261852] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d2cc2e-9980-449f-b3b9-9affcbe89c9c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 560.275997] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30705fcd-d587-4f42-88cd-3e5b6a2144ce {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 560.282162] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8f6db08-bb2f-435a-959c-0b9eafc6fc89 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 560.312093] env[62684]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-f4991a16-b08f-467f-9e3c-0857a08a2718 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 560.317598] env[62684]: DEBUG nova.virt.vmwareapi.driver [None req-c1c59c41-81c1-41bc-b784-a7220f57ad62 None None] Extension org.openstack.compute already exists. {{(pid=62684) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:226}}
[ 560.320307] env[62684]: INFO nova.compute.provider_config [None req-c1c59c41-81c1-41bc-b784-a7220f57ad62 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 560.824346] env[62684]: DEBUG nova.context [None req-c1c59c41-81c1-41bc-b784-a7220f57ad62 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),d61d2809-55d4-4098-80d1-882e744a3a55(cell1) {{(pid=62684) load_cells /opt/stack/nova/nova/context.py:464}}
[ 560.826487] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 560.826718] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 560.827443] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 560.827903] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] Acquiring lock "d61d2809-55d4-4098-80d1-882e744a3a55" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 560.828107] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] Lock "d61d2809-55d4-4098-80d1-882e744a3a55" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 560.829132] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] Lock "d61d2809-55d4-4098-80d1-882e744a3a55" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 560.850172] env[62684]: INFO dbcounter [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] Registered counter for database nova_cell0
[ 560.858314] env[62684]: INFO dbcounter [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] Registered counter for database nova_cell1
[ 560.861536] env[62684]: DEBUG oslo_db.sqlalchemy.engines [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62684) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:395}}
[ 560.861909] env[62684]: DEBUG oslo_db.sqlalchemy.engines [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62684) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:395}}
[ 560.866972] env[62684]: ERROR nova.db.main.api [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 560.866972] env[62684]: result = function(*args, **kwargs)
[ 560.866972] env[62684]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 560.866972] env[62684]: return func(*args, **kwargs)
[ 560.866972] env[62684]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 560.866972] env[62684]: result = fn(*args, **kwargs)
[ 560.866972] env[62684]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 560.866972] env[62684]: return f(*args, **kwargs)
[ 560.866972] env[62684]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 560.866972] env[62684]: return db.service_get_minimum_version(context, binaries)
[ 560.866972] env[62684]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 560.866972] env[62684]: _check_db_access()
[ 560.866972] env[62684]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 560.866972] env[62684]: stacktrace = ''.join(traceback.format_stack())
[ 560.866972] env[62684]:
[ 560.867995] env[62684]: ERROR nova.db.main.api [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 560.867995] env[62684]: result = function(*args, **kwargs)
[ 560.867995] env[62684]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 560.867995] env[62684]: return func(*args, **kwargs)
[ 560.867995] env[62684]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 560.867995] env[62684]: result = fn(*args, **kwargs)
[ 560.867995] env[62684]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 560.867995] env[62684]: return f(*args, **kwargs)
[ 560.867995] env[62684]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 560.867995] env[62684]: return db.service_get_minimum_version(context, binaries)
[ 560.867995] env[62684]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 560.867995] env[62684]: _check_db_access()
[ 560.867995] env[62684]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 560.867995] env[62684]: stacktrace = ''.join(traceback.format_stack())
[ 560.867995] env[62684]:
[ 560.868427] env[62684]: WARNING nova.objects.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] Failed to get minimum service version for cell d61d2809-55d4-4098-80d1-882e744a3a55
[ 560.868528] env[62684]: WARNING nova.objects.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 560.868949] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] Acquiring lock "singleton_lock" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 560.869143] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] Acquired lock "singleton_lock" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [
560.869389] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] Releasing lock "singleton_lock" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 560.869715] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] Full set of CONF: {{(pid=62684) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 560.869858] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ******************************************************************************** {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 560.869985] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] Configuration options gathered from: {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 560.870135] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 560.870331] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 560.870459] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ================================================================================ {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 560.870666] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] allow_resize_to_same_host = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.870840] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] arq_binding_timeout = 300 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.870973] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] backdoor_port = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.871114] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] backdoor_socket = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.871280] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] block_device_allocate_retries = 60 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.871443] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] block_device_allocate_retries_interval = 3 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.871614] env[62684]: DEBUG 
oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cert = self.pem {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.871783] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.871952] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] compute_monitors = [] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.872137] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] config_dir = [] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.872309] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] config_drive_format = iso9660 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.872442] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.872606] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] config_source = [] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.872827] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] console_host = devstack {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.873008] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] control_exchange = nova {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.873216] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cpu_allocation_ratio = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.873386] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] daemon = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.873572] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] debug = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.873745] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] default_access_ip_network_name = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.873913] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] default_availability_zone = nova {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.874090] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] default_ephemeral_format = 
None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.874258] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] default_green_pool_size = 1000 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.874496] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.874666] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] default_schedule_zone = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.874829] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] disk_allocation_ratio = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.874991] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] enable_new_services = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.875187] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] enabled_apis = ['osapi_compute'] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.875351] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] enabled_ssl_apis = [] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.875514] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] flat_injected = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.875747] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] force_config_drive = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.876452] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] force_raw_images = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.876452] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] graceful_shutdown_timeout = 5 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.876452] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] heal_instance_info_cache_interval = 60 {{(pid=62684) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.876695] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] host = cpu-1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.876695] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.876854] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] initial_disk_allocation_ratio = 1.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.877027] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] initial_ram_allocation_ratio = 1.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.877258] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.877425] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] instance_build_timeout = 0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.877583] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] instance_delete_interval = 300 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.877750] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] instance_format = [instance: %(uuid)s] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.877921] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] instance_name_template = instance-%08x {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.878161] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] instance_usage_audit = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.878348] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] instance_usage_audit_period = month {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.878517] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.878735] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] instances_path = /opt/stack/data/nova/instances {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.879011] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] internal_service_availability_zone = internal {{(pid=62684) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.879209] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] key = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.879377] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] live_migration_retry_count = 30 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.879599] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] log_color = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.879784] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] log_config_append = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.879955] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.880131] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] log_dir = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.880291] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] log_file = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.880418] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] log_options = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.880578] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] log_rotate_interval = 1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.880747] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] log_rotate_interval_type = days {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.880914] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] log_rotation_type = none {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.881054] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.881181] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.881351] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.881517] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.881642] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.881804] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] long_rpc_timeout = 1800 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.881963] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] max_concurrent_builds = 10 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.882136] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] max_concurrent_live_migrations = 1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.882293] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] max_concurrent_snapshots = 5 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.882453] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] max_local_block_devices = 3 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.882613] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] max_logfile_count = 30 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.882768] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] max_logfile_size_mb = 200 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.882925] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] maximum_instance_delete_attempts = 5 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.883102] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] metadata_listen = 0.0.0.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.883272] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] metadata_listen_port = 8775 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.883439] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] metadata_workers = 2 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.883628] env[62684]: DEBUG oslo_service.service 
[None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] migrate_max_retries = -1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.883807] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] mkisofs_cmd = genisoimage {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.884029] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] my_block_storage_ip = 10.180.1.21 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.884205] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] my_ip = 10.180.1.21 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.884382] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] network_allocate_retries = 0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.884571] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.884767] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] osapi_compute_listen = 0.0.0.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.884944] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] osapi_compute_listen_port = 8774 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.885128] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] osapi_compute_unique_server_name_scope = {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.885296] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] osapi_compute_workers = 2 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.885461] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] password_length = 12 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.885620] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] periodic_enable = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.885792] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] periodic_fuzzy_delay = 60 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.886012] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] pointer_model = usbtablet {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.886196] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] preallocate_images = none {{(pid=62684) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.886357] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] publish_errors = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.886495] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] pybasedir = /opt/stack/nova {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.886663] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ram_allocation_ratio = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.886804] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] rate_limit_burst = 0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.886971] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] rate_limit_except_level = CRITICAL {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.887144] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] rate_limit_interval = 0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.887304] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] reboot_timeout = 0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.887459] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] reclaim_instance_interval = 0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.887611] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] record = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.887778] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] reimage_timeout_per_gb = 60 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.887942] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] report_interval = 120 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.888115] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] rescue_timeout = 0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.888277] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] reserved_host_cpus = 0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.888435] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] reserved_host_disk_mb = 0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.888592] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f 
None None] reserved_host_memory_mb = 512 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.888750] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] reserved_huge_pages = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.888907] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] resize_confirm_window = 0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.889073] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] resize_fs_using_block_device = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.889233] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] resume_guests_state_on_host_boot = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.889398] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.889559] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] rpc_response_timeout = 60 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.889719] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] run_external_periodic_tasks = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.889883] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] running_deleted_instance_action = reap {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.890052] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] running_deleted_instance_poll_interval = 1800 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.890214] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] running_deleted_instance_timeout = 0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.890372] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] scheduler_instance_sync_interval = 120 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.890538] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] service_down_time = 720 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.890707] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] servicegroup_driver = db {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.890861] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] shell_completion = None {{(pid=62684) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.891063] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] shelved_offload_time = 0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.891241] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] shelved_poll_interval = 3600 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.891411] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] shutdown_timeout = 0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.891570] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] source_is_ipv6 = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.891730] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ssl_only = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.891989] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.892184] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] sync_power_state_interval = 600 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.892350] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] sync_power_state_pool_size = 1000 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.892516] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] syslog_log_facility = LOG_USER {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.892674] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] tempdir = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.892840] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] timeout_nbd = 10 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.893014] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] transport_url = **** {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.893183] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] update_resources_interval = 0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.893342] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] use_cow_images = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.893498] env[62684]: DEBUG oslo_service.service [None 
req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] use_journal = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.893676] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] use_json = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.893838] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] use_rootwrap_daemon = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.893992] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] use_stderr = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.894163] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] use_syslog = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.894317] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vcpu_pin_set = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.894480] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vif_plugging_is_fatal = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.894643] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vif_plugging_timeout = 300 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.894810] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] virt_mkfs = [] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.894969] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] volume_usage_poll_interval = 0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.895161] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] watch_log_file = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.895332] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] web = /usr/share/spice-html5 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 560.895515] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.895684] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.895848] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] os_brick.wait_mpath_device_interval = 1 {{(pid=62684) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.896077] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_concurrency.disable_process_locking = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.896686] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.896845] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.897031] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.897214] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.897389] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.897554] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.897739] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api.auth_strategy = keystone {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.897906] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api.compute_link_prefix = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.898129] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.898274] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api.dhcp_domain = novalocal {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.898439] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api.enable_instance_password = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.898607] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api.glance_link_prefix = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.898776] 
env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.898949] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.899131] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api.instance_list_per_project_cells = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.899297] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api.list_records_by_skipping_down_cells = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.899463] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api.local_metadata_per_cell = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.899631] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api.max_limit = 1000 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.899805] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api.metadata_cache_expiration = 15 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.899981] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api.neutron_default_tenant_id = default {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.900170] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api.response_validation = warn {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.900341] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api.use_neutron_default_nets = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.900515] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.900680] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.900852] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.901038] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62684) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.901215] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api.vendordata_dynamic_targets = [] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.901380] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api.vendordata_jsonfile_path = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.901563] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.901757] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.backend = dogpile.cache.memcached {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.901926] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.backend_argument = **** {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.902102] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.backend_expiration_time = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.902278] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.config_prefix = cache.oslo {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.902451] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.dead_timeout = 60.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.902616] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.debug_cache_backend = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.902782] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.enable_retry_client = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.902943] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.enable_socket_keepalive = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.903166] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.enabled = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.903347] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.enforce_fips_mode = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.903518] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.expiration_time = 600 {{(pid=62684) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.903707] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.hashclient_retry_attempts = 2 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.903883] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.hashclient_retry_delay = 1.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.904066] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.memcache_dead_retry = 300 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.904231] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.memcache_password = **** {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.904398] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.904561] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.904726] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.memcache_pool_maxsize = 10 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.904891] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.905071] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.memcache_sasl_enabled = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.905257] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.905427] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.memcache_socket_timeout = 1.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.905589] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.memcache_username = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.905756] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.proxies = [] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.905938] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.redis_db = 0 {{(pid=62684) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.906128] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.redis_password = **** {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.906306] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.redis_sentinel_service_name = mymaster {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.906482] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.906653] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.redis_server = localhost:6379 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.906824] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.redis_socket_timeout = 1.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.906980] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.redis_username = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.907160] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.retry_attempts = 2 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.907328] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.retry_delay = 0.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.907493] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.socket_keepalive_count = 1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.907657] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.socket_keepalive_idle = 1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.907818] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.socket_keepalive_interval = 1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.908025] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.tls_allowed_ciphers = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.908216] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.tls_cafile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.908380] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.tls_certfile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
560.908542] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.tls_enabled = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.908703] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cache.tls_keyfile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.908877] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cinder.auth_section = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.909063] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cinder.auth_type = password {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.909231] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cinder.cafile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.909407] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cinder.catalog_info = volumev3::publicURL {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.909570] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cinder.certfile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.909738] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cinder.collect_timing = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.909903] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cinder.cross_az_attach = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.910077] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cinder.debug = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.910242] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cinder.endpoint_template = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.910409] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cinder.http_retries = 3 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.910573] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cinder.insecure = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.910735] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cinder.keyfile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.910908] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cinder.os_region_name = RegionOne 
{{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.911085] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cinder.split_loggers = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.911249] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cinder.timeout = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.911422] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.911585] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] compute.cpu_dedicated_set = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.911745] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] compute.cpu_shared_set = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.911910] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] compute.image_type_exclude_list = [] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.912087] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.912255] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] compute.max_concurrent_disk_ops = 0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.912419] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] compute.max_disk_devices_to_attach = -1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.912581] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.912751] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.912913] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] compute.resource_provider_association_refresh = 300 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.913086] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.913250] env[62684]: DEBUG oslo_service.service [None 
req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] compute.shutdown_retry_interval = 10 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.913431] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.913647] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] conductor.workers = 2 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.913834] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] console.allowed_origins = [] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.913998] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] console.ssl_ciphers = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.914185] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] console.ssl_minimum_version = default {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.914358] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] consoleauth.enforce_session_timeout = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.914547] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] consoleauth.token_ttl = 600 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.914711] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cyborg.cafile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.914869] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cyborg.certfile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.915084] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cyborg.collect_timing = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.915249] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cyborg.connect_retries = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.915416] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cyborg.connect_retry_delay = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.915577] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cyborg.endpoint_override = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.915741] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] 
cyborg.insecure = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.915899] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cyborg.keyfile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.916074] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cyborg.max_version = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.916239] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cyborg.min_version = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.916397] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cyborg.region_name = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.916559] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cyborg.retriable_status_codes = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.916716] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cyborg.service_name = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.916883] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cyborg.service_type = accelerator {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.917054] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cyborg.split_loggers = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.917274] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cyborg.status_code_retries = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.917466] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cyborg.status_code_retry_delay = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.917629] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cyborg.timeout = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.917818] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.917976] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] cyborg.version = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.918161] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] database.asyncio_connection = **** {{(pid=62684) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.918325] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] database.asyncio_slave_connection = **** {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.918493] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] database.backend = sqlalchemy {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.918662] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] database.connection = **** {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.918827] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] database.connection_debug = 0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.918995] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] database.connection_parameters = {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.919172] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] database.connection_recycle_time = 3600 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.919335] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] database.connection_trace = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.919497] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] database.db_inc_retry_interval = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.919659] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] database.db_max_retries = 20 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.919821] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] database.db_max_retry_interval = 10 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.919998] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] database.db_retry_interval = 1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.920213] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] database.max_overflow = 50 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.920382] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] database.max_pool_size = 5 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.920544] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] database.max_retries = 10 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
560.920713] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] database.mysql_sql_mode = TRADITIONAL {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.920873] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] database.mysql_wsrep_sync_wait = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.921038] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] database.pool_timeout = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.921205] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] database.retry_interval = 10 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.921363] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] database.slave_connection = **** {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.921521] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] database.sqlite_synchronous = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.921680] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] database.use_db_reconnect = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.921848] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api_database.asyncio_connection = **** {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.922011] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api_database.asyncio_slave_connection = **** {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.922189] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api_database.backend = sqlalchemy {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.922356] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api_database.connection = **** {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.922527] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api_database.connection_debug = 0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.922720] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api_database.connection_parameters = {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.922904] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api_database.connection_recycle_time = 3600 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.923083] env[62684]: DEBUG 
oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api_database.connection_trace = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.923248] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api_database.db_inc_retry_interval = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.923412] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api_database.db_max_retries = 20 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.923596] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api_database.db_max_retry_interval = 10 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.923778] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api_database.db_retry_interval = 1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.923945] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api_database.max_overflow = 50 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.924134] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api_database.max_pool_size = 5 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.924310] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api_database.max_retries = 10 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.924481] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.924644] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.924804] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api_database.pool_timeout = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.924966] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api_database.retry_interval = 10 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.925139] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api_database.slave_connection = **** {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.925299] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] api_database.sqlite_synchronous = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.925473] env[62684]: DEBUG oslo_service.service 
[None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] devices.enabled_mdev_types = [] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.925650] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.925823] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ephemeral_storage_encryption.default_format = luks {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.926032] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ephemeral_storage_encryption.enabled = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.926212] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.926384] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.api_servers = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.926548] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.cafile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.926711] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.certfile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.926874] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.collect_timing = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.927045] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.connect_retries = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.927210] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.connect_retry_delay = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.927373] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.debug = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.927537] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.default_trusted_certificate_ids = [] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.927698] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.enable_certificate_validation = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.927862] env[62684]: DEBUG oslo_service.service [None 
req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.enable_rbd_download = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.928032] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.endpoint_override = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.928206] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.insecure = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.928369] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.keyfile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.928531] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.max_version = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.928691] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.min_version = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.928855] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.num_retries = 3 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.929034] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.rbd_ceph_conf = {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.929203] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.rbd_connect_timeout = 5 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.929373] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.rbd_pool = {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.929543] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.rbd_user = {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.929706] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.region_name = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.929873] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.retriable_status_codes = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.930041] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.service_name = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.930217] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.service_type = image {{(pid=62684) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.930379] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.split_loggers = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.930538] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.status_code_retries = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.930701] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.status_code_retry_delay = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.930860] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.timeout = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.931051] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.931223] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.verify_glance_signatures = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.931384] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] glance.version = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.931554] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] guestfs.debug = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.931721] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] mks.enabled = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.932117] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.932329] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] image_cache.manager_interval = 2400 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.932507] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] image_cache.precache_concurrency = 1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.932677] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] image_cache.remove_unused_base_images = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.932854] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62684) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.933036] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.933215] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] image_cache.subdirectory_name = _base {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.933395] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ironic.api_max_retries = 60 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.933578] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ironic.api_retry_interval = 2 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.933751] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ironic.auth_section = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.933923] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ironic.auth_type = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.934094] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ironic.cafile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.934259] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ironic.certfile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.934425] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ironic.collect_timing = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.934592] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ironic.conductor_group = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.934756] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ironic.connect_retries = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.934917] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ironic.connect_retry_delay = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.935089] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ironic.endpoint_override = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.935258] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ironic.insecure = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.935418] 
env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ironic.keyfile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.935580] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ironic.max_version = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.935742] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ironic.min_version = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.935935] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ironic.peer_list = [] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.936120] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ironic.region_name = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.936288] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ironic.retriable_status_codes = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.936457] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ironic.serial_console_state_timeout = 10 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.936619] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ironic.service_name = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.936792] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ironic.service_type = baremetal {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.936955] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ironic.shard = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.937133] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ironic.split_loggers = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.937296] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ironic.status_code_retries = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.937454] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ironic.status_code_retry_delay = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.937614] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ironic.timeout = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.937796] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ironic.valid_interfaces = 
['internal', 'public'] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.937984] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ironic.version = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.938200] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.938380] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] key_manager.fixed_key = **** {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.938568] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.938738] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] barbican.barbican_api_version = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.938902] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] barbican.barbican_endpoint = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.939087] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] barbican.barbican_endpoint_type = public {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.939251] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] barbican.barbican_region_name = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.939412] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] barbican.cafile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.939572] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] barbican.certfile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.939738] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] barbican.collect_timing = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.939899] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] barbican.insecure = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.940069] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] barbican.keyfile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.940242] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] barbican.number_of_retries = 60 {{(pid=62684) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.940405] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] barbican.retry_delay = 1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.940569] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] barbican.send_service_user_token = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.940733] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] barbican.split_loggers = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.940891] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] barbican.timeout = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.941064] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] barbican.verify_ssl = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.941228] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] barbican.verify_ssl_path = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.941398] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] barbican_service_user.auth_section = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.941606] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] barbican_service_user.auth_type = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.941810] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] barbican_service_user.cafile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.941985] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] barbican_service_user.certfile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.942170] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] barbican_service_user.collect_timing = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.942358] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] barbican_service_user.insecure = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.942523] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] barbican_service_user.keyfile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.942688] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] barbican_service_user.split_loggers = False {{(pid=62684) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.942850] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] barbican_service_user.timeout = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.943031] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vault.approle_role_id = **** {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.943199] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vault.approle_secret_id = **** {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.943371] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vault.kv_mountpoint = secret {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.943581] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vault.kv_path = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.943726] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vault.kv_version = 2 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.943893] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vault.namespace = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.944068] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vault.root_token_id = **** {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.944232] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vault.ssl_ca_crt_file = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.944400] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vault.timeout = 60.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.944565] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vault.use_ssl = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.944737] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.944910] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] keystone.auth_section = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.945088] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] keystone.auth_type = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.945252] env[62684]: DEBUG oslo_service.service [None 
req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] keystone.cafile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.945412] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] keystone.certfile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.945577] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] keystone.collect_timing = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.945738] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] keystone.connect_retries = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.945904] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] keystone.connect_retry_delay = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.946069] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] keystone.endpoint_override = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.946237] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] keystone.insecure = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.946395] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] keystone.keyfile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.946555] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] keystone.max_version = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.946716] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] keystone.min_version = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.946873] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] keystone.region_name = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.947043] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] keystone.retriable_status_codes = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.947206] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] keystone.service_name = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.947374] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] keystone.service_type = identity {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.947539] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] keystone.split_loggers = False {{(pid=62684) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.947688] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] keystone.status_code_retries = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.947845] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] keystone.status_code_retry_delay = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.947998] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] keystone.timeout = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.948192] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.948351] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] keystone.version = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.948549] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.connection_uri = {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.948709] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.cpu_mode = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.948874] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.cpu_model_extra_flags = [] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.949054] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.cpu_models = [] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.949230] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.cpu_power_governor_high = performance {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.949397] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.cpu_power_governor_low = powersave {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.949560] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.cpu_power_management = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.949759] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.949895] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.device_detach_attempts = 8 {{(pid=62684) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.950071] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.device_detach_timeout = 20 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.950241] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.disk_cachemodes = [] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.950401] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.disk_prefix = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.950564] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.enabled_perf_events = [] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.950730] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.file_backed_memory = 0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.950901] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.gid_maps = [] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.951073] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.hw_disk_discard = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.951253] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.hw_machine_type = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.951433] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.images_rbd_ceph_conf = {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.951626] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.951795] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.951965] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.images_rbd_glance_store_name = {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.952150] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.images_rbd_pool = rbd {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.952326] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.images_type = default {{(pid=62684) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.952486] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.images_volume_group = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.952650] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.inject_key = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.952816] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.inject_partition = -2 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.952977] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.inject_password = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.953154] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.iscsi_iface = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.953317] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.iser_use_multipath = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.953482] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.live_migration_bandwidth = 0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.953693] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.953868] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.live_migration_downtime = 500 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.954048] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.954220] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.954380] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.live_migration_inbound_addr = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.954544] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.954707] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.live_migration_permit_post_copy = False {{(pid=62684) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.954863] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.live_migration_scheme = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.955046] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.live_migration_timeout_action = abort {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.955213] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.live_migration_tunnelled = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.955378] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.live_migration_uri = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.955539] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.live_migration_with_native_tls = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.955696] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.max_queues = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.955877] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.956139] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.956309] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.nfs_mount_options = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.956616] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.956799] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.956970] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.num_iser_scan_tries = 5 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.957149] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.num_memory_encrypted_guests = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.957317] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] 
libvirt.num_nvme_discover_tries = 5 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.957479] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.num_pcie_ports = 0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.957649] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.num_volume_scan_tries = 5 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.957810] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.pmem_namespaces = [] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.957973] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.quobyte_client_cfg = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.958290] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.958470] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.rbd_connect_timeout = 5 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.958637] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.958802] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.958970] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.rbd_secret_uuid = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.959139] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.rbd_user = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.959302] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.959474] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.remote_filesystem_transport = ssh {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.959633] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.rescue_image_id = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.959791] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] 
libvirt.rescue_kernel_id = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.959946] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.rescue_ramdisk_id = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.960127] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.960287] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.rx_queue_size = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.960452] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.smbfs_mount_options = {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.960747] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.960926] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.snapshot_compression = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.961102] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.snapshot_image_format = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.961333] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.961516] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.sparse_logical_volumes = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.961687] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.swtpm_enabled = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.961854] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.swtpm_group = tss {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.962031] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.swtpm_user = tss {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.962206] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.sysinfo_serial = unique {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.962363] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] 
libvirt.tb_cache_size = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.962519] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.tx_queue_size = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.962680] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.uid_maps = [] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.962841] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.use_virtio_for_bridges = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.963015] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.virt_type = kvm {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.963193] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.volume_clear = zero {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.963356] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.volume_clear_size = 0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.963520] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.volume_use_multipath = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.963701] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.vzstorage_cache_path = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.963876] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.964058] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.vzstorage_mount_group = qemu {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.964226] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.vzstorage_mount_opts = [] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.964394] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.964683] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.964891] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] 
libvirt.vzstorage_mount_user = stack {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.965072] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.965250] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.auth_section = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.965426] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.auth_type = password {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.965586] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.cafile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.965748] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.certfile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.965925] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.collect_timing = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.966107] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.connect_retries = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.966272] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.connect_retry_delay = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.966447] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.default_floating_pool = public {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.966606] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.endpoint_override = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.966770] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.extension_sync_interval = 600 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.966932] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.http_retries = 3 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.967106] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.insecure = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.967265] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.keyfile = None {{(pid=62684) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.967424] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.max_version = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.967598] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.967757] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.min_version = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.967922] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.ovs_bridge = br-int {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.968098] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.physnets = [] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.968271] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.region_name = RegionOne {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.968429] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.retriable_status_codes = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.968596] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.service_metadata_proxy = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.968756] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.service_name = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.968919] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.service_type = network {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.969090] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.split_loggers = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.969253] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.status_code_retries = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.969409] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.status_code_retry_delay = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.969568] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.timeout = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
560.969746] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.969904] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] neutron.version = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.970087] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] notifications.bdms_in_notifications = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.970266] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] notifications.default_level = INFO {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.970443] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] notifications.notification_format = unversioned {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.970609] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] notifications.notify_on_state_change = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.970787] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.970964] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] pci.alias = [] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.971151] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] pci.device_spec = [] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.971317] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] pci.report_in_placement = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.971486] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.auth_section = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.971661] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.auth_type = password {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.971827] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.971988] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.cafile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
560.972161] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.certfile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.972325] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.collect_timing = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.972483] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.connect_retries = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.972646] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.connect_retry_delay = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.972805] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.default_domain_id = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.972962] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.default_domain_name = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.973134] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.domain_id = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.973292] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.domain_name = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.973450] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.endpoint_override = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.973637] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.insecure = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.973808] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.keyfile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.973968] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.max_version = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.974140] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.min_version = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.974311] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.password = **** {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.974472] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None 
None] placement.project_domain_id = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.974643] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.project_domain_name = Default {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.974819] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.project_id = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.974989] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.project_name = service {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.975174] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.region_name = RegionOne {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.975335] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.retriable_status_codes = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.975495] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.service_name = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.975663] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.service_type = placement {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.975826] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.split_loggers = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.976008] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.status_code_retries = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.976180] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.status_code_retry_delay = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.976343] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.system_scope = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.976500] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.timeout = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.976659] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.trust_id = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.976818] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.user_domain_id = None {{(pid=62684) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.976984] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.user_domain_name = Default {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.977156] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.user_id = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.977331] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.username = nova {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.977510] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.977670] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] placement.version = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.977886] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] quota.cores = 20 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.978016] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] quota.count_usage_from_placement = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.978192] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.978360] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] quota.injected_file_content_bytes = 10240 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.978526] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] quota.injected_file_path_length = 255 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.978689] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] quota.injected_files = 5 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.978857] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] quota.instances = 10 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.979031] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] quota.key_pairs = 100 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.979202] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] quota.metadata_items = 128 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
560.979365] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] quota.ram = 51200 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.979524] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] quota.recheck_quota = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.979689] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] quota.server_group_members = 10 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.979852] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] quota.server_groups = 10 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.980033] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.980203] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.980365] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] scheduler.image_metadata_prefilter = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.980527] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.980695] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] scheduler.max_attempts = 3 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.980859] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] scheduler.max_placement_results = 1000 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.981031] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.981195] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] scheduler.query_placement_for_image_type_support = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.981357] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.981538] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] scheduler.workers = 2 {{(pid=62684) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.981740] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.981915] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.982107] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.982283] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.982453] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.982618] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.982783] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.982969] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.983155] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] filter_scheduler.host_subset_size = 1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.983321] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.983481] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.983685] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.983920] env[62684]: DEBUG 
oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] filter_scheduler.isolated_hosts = [] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.984119] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] filter_scheduler.isolated_images = [] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.984291] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.984457] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.984624] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.984800] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] filter_scheduler.pci_in_placement = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.984965] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.985141] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.985305] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.985477] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.985641] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.985806] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.985966] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] filter_scheduler.track_instance_changes = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.986156] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] filter_scheduler.weight_classes = 
['nova.scheduler.weights.all_weighers'] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.986327] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] metrics.required = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.986537] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] metrics.weight_multiplier = 1.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.986742] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.986915] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] metrics.weight_setting = [] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.987256] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.987437] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] serial_console.enabled = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.987618] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] serial_console.port_range = 10000:20000 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.987792] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.987991] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.988144] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] serial_console.serialproxy_port = 6083 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.988314] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] service_user.auth_section = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.988489] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] service_user.auth_type = password {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.988651] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] service_user.cafile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.988811] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] service_user.certfile = 
None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.988973] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] service_user.collect_timing = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.989163] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] service_user.insecure = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.989323] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] service_user.keyfile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.989493] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] service_user.send_service_user_token = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.989656] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] service_user.split_loggers = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.989815] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] service_user.timeout = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.989981] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] spice.agent_enabled = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.990157] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] spice.enabled = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.990461] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.990665] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.990838] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] spice.html5proxy_port = 6082 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.991022] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] spice.image_compression = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.991181] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] spice.jpeg_compression = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.991342] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] spice.playback_compression = None {{(pid=62684) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.991505] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] spice.require_secure = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.991702] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] spice.server_listen = 127.0.0.1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.991879] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.992051] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] spice.streaming_mode = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.992215] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] spice.zlib_compression = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.992380] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] upgrade_levels.baseapi = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.992551] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] upgrade_levels.compute = auto {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.992710] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] upgrade_levels.conductor = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.992866] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] upgrade_levels.scheduler = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.993041] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vendordata_dynamic_auth.auth_section = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.993209] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vendordata_dynamic_auth.auth_type = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.993365] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vendordata_dynamic_auth.cafile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.993519] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vendordata_dynamic_auth.certfile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.993702] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62684) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.993864] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vendordata_dynamic_auth.insecure = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.994029] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vendordata_dynamic_auth.keyfile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.994195] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.994347] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vendordata_dynamic_auth.timeout = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.994520] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vmware.api_retry_count = 10 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.994675] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vmware.ca_file = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.994843] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vmware.cache_prefix = devstack-image-cache {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.995021] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vmware.cluster_name = testcl1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.995185] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vmware.connection_pool_size = 10 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.995341] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vmware.console_delay_seconds = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.995507] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vmware.datastore_regex = ^datastore.* {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.995712] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.995883] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vmware.host_password = **** {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.996058] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vmware.host_port = 443 {{(pid=62684) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.996227] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vmware.host_username = administrator@vsphere.local {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.996389] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vmware.insecure = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.996547] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vmware.integration_bridge = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.996708] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vmware.maximum_objects = 100 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.996865] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vmware.pbm_default_policy = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.997036] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vmware.pbm_enabled = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.997198] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vmware.pbm_wsdl_location = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.997366] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.997523] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vmware.serial_port_proxy_uri = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.997679] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vmware.serial_port_service_uri = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.997844] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vmware.task_poll_interval = 0.5 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.998062] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vmware.use_linked_clone = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.998189] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vmware.vnc_keymap = en-us {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.998353] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vmware.vnc_port = 5900 {{(pid=62684) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.998514] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vmware.vnc_port_total = 10000 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.998697] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vnc.auth_schemes = ['none'] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.998869] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vnc.enabled = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.999175] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.999360] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.999587] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vnc.novncproxy_port = 6080 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 560.999796] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vnc.server_listen = 127.0.0.1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.000031] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.000208] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vnc.vencrypt_ca_certs = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.000370] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vnc.vencrypt_client_cert = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.000527] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vnc.vencrypt_client_key = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.000706] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.000872] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] workarounds.disable_deep_image_inspection = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.001044] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62684) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.001213] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.001373] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.001538] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] workarounds.disable_rootwrap = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.001741] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] workarounds.enable_numa_live_migration = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.001949] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.002166] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.002367] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.002532] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] workarounds.libvirt_disable_apic = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.002694] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.002856] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.003029] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.003197] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.003359] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.003520] 
env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.003716] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.003883] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.004058] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.004227] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.004410] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.004582] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] wsgi.client_socket_timeout = 900 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.004748] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] wsgi.default_pool_size = 1000 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.004913] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] wsgi.keep_alive = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.005101] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] wsgi.max_header_line = 16384 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.005270] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] wsgi.secure_proxy_ssl_header = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.005433] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] wsgi.ssl_ca_file = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.005593] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] wsgi.ssl_cert_file = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.005754] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] wsgi.ssl_key_file = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} 
[ 561.005940] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] wsgi.tcp_keepidle = 600 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.006143] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.006315] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] zvm.ca_file = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.006473] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] zvm.cloud_connector_url = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.006767] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.006952] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] zvm.reachable_timeout = 300 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.007153] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_policy.enforce_new_defaults = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.007537] env[62684]: WARNING oslo_config.cfg [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
[ 561.007735] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_policy.enforce_scope = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.007895] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_policy.policy_default_rule = default {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.008084] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.008262] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_policy.policy_file = policy.yaml {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.008431] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.008593] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.008754] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.008911] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.009084] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.009260] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_policy.remote_timeout = 60.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.009432] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.009606] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.009787] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] profiler.connection_string = messaging:// {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.009952] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] profiler.enabled = False {{(pid=62684) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.010135] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] profiler.es_doc_type = notification {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.010302] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] profiler.es_scroll_size = 10000 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.010467] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] profiler.es_scroll_time = 2m {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.010628] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] profiler.filter_error_trace = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.010793] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] profiler.hmac_keys = **** {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.010957] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] profiler.sentinel_service_name = mymaster {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.011137] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] profiler.socket_timeout = 0.1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.011299] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] profiler.trace_requests = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.011455] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] profiler.trace_sqlalchemy = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.011661] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] profiler_jaeger.process_tags = {} {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.011832] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] profiler_jaeger.service_name_prefix = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.011996] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] profiler_otlp.service_name_prefix = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.012181] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] remote_debug.host = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.012340] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] remote_debug.port = None {{(pid=62684) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.012514] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.012676] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.012838] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.012997] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.013172] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.013331] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.013490] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.013677] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.013846] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.014028] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.014194] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.014361] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.014527] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.014690] 
env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.014858] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.015027] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.015192] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.015360] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.015519] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.015677] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.015841] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.016008] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.016177] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.016335] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.016493] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.016649] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.016808] env[62684]: DEBUG oslo_service.service [None 
req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.016964] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.017136] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.017296] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.ssl = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.017458] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.017621] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.017829] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.017945] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.018122] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.ssl_version = {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.018283] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.018462] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.018625] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_notifications.retry = -1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.018797] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.018964] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_messaging_notifications.transport_url = **** {{(pid=62684) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.019154] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_limit.auth_section = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.019317] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_limit.auth_type = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.019474] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_limit.cafile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.019629] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_limit.certfile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.019790] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_limit.collect_timing = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.019947] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_limit.connect_retries = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.020117] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_limit.connect_retry_delay = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.020273] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_limit.endpoint_id = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.020439] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_limit.endpoint_interface = publicURL {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.020594] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_limit.endpoint_override = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.020749] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_limit.endpoint_region_name = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.020903] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_limit.endpoint_service_name = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.021067] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_limit.endpoint_service_type = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.021231] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_limit.insecure = False {{(pid=62684) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.021384] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_limit.keyfile = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.021547] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_limit.max_version = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.021719] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_limit.min_version = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.021874] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_limit.region_name = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.022041] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_limit.retriable_status_codes = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.022198] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_limit.service_name = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.022352] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_limit.service_type = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.022510] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_limit.split_loggers = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.022664] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_limit.status_code_retries = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.022820] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_limit.status_code_retry_delay = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.022974] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_limit.timeout = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.023141] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_limit.valid_interfaces = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.023293] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_limit.version = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.023453] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_reports.file_event_handler = None {{(pid=62684) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.023642] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.023817] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] oslo_reports.log_dir = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.023988] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.024175] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.024347] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.024512] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.024674] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.024835] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.025014] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.025182] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vif_plug_ovs_privileged.group = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.025341] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.025509] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.025704] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.025869] env[62684]: DEBUG 
oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] vif_plug_ovs_privileged.user = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.026051] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] os_vif_linux_bridge.flat_interface = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.026236] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.026407] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.026577] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.026749] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.026914] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.027092] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.027257] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.027434] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.027604] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] os_vif_ovs.isolate_vif = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.027770] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.027935] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.028115] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62684) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.028287] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] os_vif_ovs.ovsdb_interface = native {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.028449] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] os_vif_ovs.per_port_bridge = False {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.028618] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] privsep_osbrick.capabilities = [21] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.028777] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] privsep_osbrick.group = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.028932] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] privsep_osbrick.helper_command = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.029107] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.029276] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.029432] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] privsep_osbrick.user = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.029616] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.029809] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] nova_sys_admin.group = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.029979] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] nova_sys_admin.helper_command = None {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.030226] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.030408] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.030571] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] nova_sys_admin.user = None {{(pid=62684) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 561.030705] env[62684]: DEBUG oslo_service.service [None req-0ea2420e-5267-4f9a-a779-50c26e4c293f None None] ******************************************************************************** {{(pid=62684) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 561.031221] env[62684]: INFO nova.service [-] Starting compute node (version 29.1.0) [ 561.535123] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] Getting list of instances from cluster (obj){ [ 561.535123] env[62684]: value = "domain-c8" [ 561.535123] env[62684]: _type = "ClusterComputeResource" [ 561.535123] env[62684]: } {{(pid=62684) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 561.536302] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a219eae-ffa0-4fb4-9f3b-470cbe04a6bd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.545224] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] Got total of 0 instances {{(pid=62684) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 561.545738] env[62684]: WARNING nova.virt.vmwareapi.driver [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 561.546209] env[62684]: INFO nova.virt.node [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] Generated node identity c23c281e-ec1f-4876-972e-a98655f2084f [ 561.546436] env[62684]: INFO nova.virt.node [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] Wrote node identity c23c281e-ec1f-4876-972e-a98655f2084f to /opt/stack/data/n-cpu-1/compute_id [ 562.048774] env[62684]: WARNING nova.compute.manager [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] Compute nodes ['c23c281e-ec1f-4876-972e-a98655f2084f'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 563.054606] env[62684]: INFO nova.compute.manager [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 564.059635] env[62684]: WARNING nova.compute.manager [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
[ 564.060033] env[62684]: DEBUG oslo_concurrency.lockutils [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.060120] env[62684]: DEBUG oslo_concurrency.lockutils [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.060271] env[62684]: DEBUG oslo_concurrency.lockutils [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 564.060423] env[62684]: DEBUG nova.compute.resource_tracker [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 564.061737] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9875b20b-221a-4e80-afaf-b383ef826840 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.069565] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73773a29-b3e3-4950-8042-447e0b33148f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.082864] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b8db3ef-841a-49b5-a25b-80c261993b6c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.088936] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57fba2e8-a7cd-4f24-8268-f3911035538c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.116646] env[62684]: DEBUG nova.compute.resource_tracker [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181274MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 564.116855] env[62684]: DEBUG oslo_concurrency.lockutils [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.117127] env[62684]: DEBUG oslo_concurrency.lockutils [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.619604] env[62684]: WARNING 
nova.compute.resource_tracker [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] No compute node record for cpu-1:c23c281e-ec1f-4876-972e-a98655f2084f: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host c23c281e-ec1f-4876-972e-a98655f2084f could not be found. [ 565.123144] env[62684]: INFO nova.compute.resource_tracker [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: c23c281e-ec1f-4876-972e-a98655f2084f [ 566.630600] env[62684]: DEBUG nova.compute.resource_tracker [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 566.630945] env[62684]: DEBUG nova.compute.resource_tracker [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 566.783867] env[62684]: INFO nova.scheduler.client.report [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] [req-0c32fe59-2f43-4540-9ee9-84860b5be3b4] Created resource provider record via placement API for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 566.800940] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1feb8209-8c1a-4f24-9db1-91434266d3d2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.808638] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a35d99d-f3c0-4b34-ac46-bc6423f6f3e1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.837441] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-193d70d5-4bb5-4db5-80c7-d78687997434 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.844718] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f5dbd90-4761-42b9-a6c4-519c8e5766ea {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.858131] env[62684]: DEBUG nova.compute.provider_tree [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 567.398214] env[62684]: DEBUG nova.scheduler.client.report [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 567.398462] env[62684]: DEBUG nova.compute.provider_tree [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 0 to 1 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 567.398602] env[62684]: DEBUG nova.compute.provider_tree [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 567.454758] env[62684]: DEBUG nova.compute.provider_tree [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 1 to 2 during operation: update_traits {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 567.959865] env[62684]: DEBUG nova.compute.resource_tracker [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 567.960508] env[62684]: DEBUG oslo_concurrency.lockutils [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.843s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 567.960508] env[62684]: DEBUG nova.service [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] Creating RPC server for service compute {{(pid=62684) start /opt/stack/nova/nova/service.py:186}} [ 567.973785] env[62684]: DEBUG nova.service [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] Join ServiceGroup membership for this service compute {{(pid=62684) start /opt/stack/nova/nova/service.py:203}} [ 567.974076] env[62684]: DEBUG nova.servicegroup.drivers.db [None req-385ed80d-a87c-4d3b-98ef-ef93c0b54e1d None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62684) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 611.976293] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 612.480043] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Getting list of instances from cluster (obj){ [ 612.480043] env[62684]: value = 
"domain-c8" [ 612.480043] env[62684]: _type = "ClusterComputeResource" [ 612.480043] env[62684]: } {{(pid=62684) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 612.481300] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6dcea3b-1485-41e8-b7bd-3770e80141c1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.489750] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Got total of 0 instances {{(pid=62684) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 612.489968] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 612.490270] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Getting list of instances from cluster (obj){ [ 612.490270] env[62684]: value = "domain-c8" [ 612.490270] env[62684]: _type = "ClusterComputeResource" [ 612.490270] env[62684]: } {{(pid=62684) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 612.491500] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f4dba6a-16d7-4f54-8fec-0f84fe7f3314 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.498411] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Got total of 0 instances {{(pid=62684) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 616.309295] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.309602] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.309808] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 616.309935] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the list of instances to heal {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 616.812769] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Didn't find any instances for network info cache update. 
{{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 616.812992] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.813193] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.813391] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.813578] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.813757] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.813939] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.814117] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 616.814269] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 617.317233] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.317590] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.317656] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 617.317797] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 617.318709] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91fe738d-6a2a-459e-b13b-b384715a97ab {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.326933] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b097b4-9c71-4cec-847b-31c561d14919 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.341141] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6326a189-dbfa-4f32-a067-658d675de5e5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.347478] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a97a875-0666-44f4-b020-4da6cb5d7990 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.375153] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181279MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 617.375313] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.375524] 
env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 618.393619] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 618.393845] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 618.407806] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a928d53-9d76-49f4-a46d-407d539ac296 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.414977] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-615e7229-a4ba-474e-b0ff-e617007bd26a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.444762] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23129b29-21d2-4f6f-8a29-2ba4ea008495 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.451957] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87064f1c-d58c-4149-8339-971b52736b19 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.465086] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 618.968919] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 619.473700] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 619.474197] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.098s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 679.460652] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 679.461154] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 679.966118] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 679.966274] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 679.966396] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the list of instances to heal {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 680.471316] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Didn't find any instances for network info cache update. {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 680.471702] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 680.471702] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 680.471818] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 680.471961] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 680.472121] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 680.472267] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 680.472393] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 680.472531] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 680.975807] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 680.976065] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 680.976236] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 680.976381] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 680.977298] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f060f1-0d5d-4d9f-9cde-06361e659ba7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.985567] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81ec6289-e7b8-4a5e-8fcc-007512f063ab {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.999605] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47771ea0-ae90-4bd1-91b3-d851dbca0b89 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.005421] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-089d3d6b-32b3-4270-b24f-071d9691eabb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.032993] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181276MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 681.033135] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 
None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 681.033319] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 682.053177] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 682.053473] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 682.067858] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fb58f26-e31e-4d20-a2dd-f27ccbae0c60 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.076850] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d4425c0-97d5-4b13-ac64-6b08f1d79dcc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.107021] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b23fa9e-e834-4528-a78f-f50e9387b801 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.116008] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e11d08e-315f-422d-b0c6-f8ec83b90dcb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.133194] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 682.636359] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 682.637650] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 682.637827] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.604s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.639791] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 742.640195] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 742.640234] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 742.640359] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the list of instances to heal {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 743.143316] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Didn't find any instances for network info cache update. {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 743.143559] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 743.143727] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 743.143816] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 743.143958] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 743.144101] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 743.144246] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 743.144479] env[62684]: DEBUG 
nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 743.144567] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 743.648155] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.648587] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.648587] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.648743] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 743.649590] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-391275c8-ae06-41e2-8b91-8f631210ed62 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.657712] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-694bb693-a1b0-42f4-8428-bac6522ae4a6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.671421] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b45dd5b-0f3e-432c-bf72-b9e2beac497c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.677249] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91328753-6862-44f6-b005-50016a7323ac {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.705591] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181275MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 743.705796] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.706071] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 744.725459] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 744.725745] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 744.739256] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d7d038e-30b6-4f79-90de-f5b1a91044ca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.747742] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb35f7e9-dec4-4db5-a7c2-4a4db3788c23 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.776332] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccd6887e-deb5-44a7-93ab-a25bb1a3bf7a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.783255] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da1fe4a4-3275-4659-804e-a856c3db53e0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.796618] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 745.300143] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 745.301346] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 
745.301525] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.596s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.958430] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 798.958852] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 799.463384] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 799.463575] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 799.463705] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 799.463848] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 799.967054] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.967322] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.967477] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.967627] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 799.968660] env[62684]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d7c19ac-f352-4258-aa8e-131ec2c3ca4f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.977440] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-875b83c1-a847-4118-99ff-e2dc0857a68e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.992015] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-995f5495-9295-4f9e-9822-5f785f8be526 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.998320] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4bfa21a-ab96-4ae7-9f39-c817641df355 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.028907] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181279MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 800.029099] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.029269] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 801.047412] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 801.047645] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 801.061666] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c229c2c9-3c04-4dc6-bae6-0e6cffdb3688 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.069601] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46ec8987-3d47-4bc9-93e5-3289d2091132 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.099067] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-367280d8-b468-43b9-b11f-4b68deef6981 {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.105897] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd10d021-ebbc-4655-97a1-03e26da9db80 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.118548] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 801.622065] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 801.623349] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 801.623529] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.461026] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 802.461443] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 802.461443] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the list of instances to heal {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 802.964715] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Didn't find any instances for network info cache update. 
{{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 802.964993] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 802.965128] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 802.965328] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 802.965477] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 856.302308] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 856.302308] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Cleaning up deleted instances {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 856.806232] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] There are 0 instances to clean {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 856.806232] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 856.806232] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Cleaning up deleted instances with incomplete migration {{(pid=62684) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 857.308072] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 860.805432] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 860.805847] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 860.805890] env[62684]: DEBUG oslo_service.periodic_task [None 
req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 860.806059] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 860.806210] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 860.806363] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 861.309973] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.310195] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.310345] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.310491] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 861.311466] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46515d60-5549-4111-95e0-d4a560fe6175 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.319473] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3bd518c-9d5c-4c91-a759-1179f8411640 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.332979] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2890d330-c338-4538-a27e-63e1aac873e6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.338973] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-356a05fa-e6e9-4840-8767-71c0d84c10fb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.367745] env[62684]: DEBUG 
nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181278MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 861.367893] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.368060] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.390020] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 862.390020] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 862.404043] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e425fb-288f-45a5-b37c-c917fbf0bb5b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.409860] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5678c2a4-7da0-4133-b694-c7bc5d0efa3b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.438933] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24763098-8034-48ce-acb6-50998ce3f44d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.445566] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5bb8774-f37c-477b-b2e6-2a009069396e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.458124] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 862.963468] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 862.963468] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 862.963468] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.595s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.458307] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 863.458628] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 863.458628] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the list of instances to heal {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 863.962598] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Didn't find any instances for network info cache update. {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 863.962868] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 863.962988] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 863.963153] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 917.801491] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 919.300817] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 919.301252] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 919.301252] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 919.804079] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.804399] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.804505] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.804679] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 919.805708] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-964f3624-fc48-484d-ad02-40c9dfbb1601 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.813730] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52e8e572-1325-469e-925c-ff7e85226189 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.827623] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-050d331b-d97d-476a-b073-3b37b54c0261 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.833666] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c48274a9-3582-4b75-b34b-0844bc61b6bb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.862704] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181270MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 919.862855] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.863019] 
env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.895132] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 920.895461] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 920.911270] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 920.923278] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 920.923462] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 920.935093] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 920.950273] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 920.962895] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0ef8e8d7-2e2d-4c4c-84a2-cfc85e14d789 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.968530] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a12be31e-42ad-4a67-9cd3-6a84636c5724 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.996685] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f47128b-bb46-4d31-9652-98b9d881805b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.003250] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ae52519-c488-477d-b3a0-b21ac46553e5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.015502] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 921.519862] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 921.519862] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 921.519862] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.657s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.514599] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 923.514961] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 923.515115] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 923.515226] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the 
list of instances to heal {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 924.017941] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Didn't find any instances for network info cache update. {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 924.018224] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 924.018353] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 924.018499] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 924.018644] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 924.018783] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 979.300654] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 979.300924] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 980.300706] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 980.804399] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.804654] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.804824] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.804972] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 980.805923] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a50331-81a2-4c5b-95ff-be0e401ff507 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.814363] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f795e37-12a6-4bf0-b6f8-c5019b1f5bbf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.828343] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c742d91-c2fd-4015-94af-9207be306a0e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.834547] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a273bb29-3613-4f01-b4c3-aa4a43c99549 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.863407] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181271MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 980.863577] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.863750] 
env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.882881] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 981.883170] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 981.896303] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35f0a8eb-4c73-4439-b431-514fc209147d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.903695] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aba9101-e21f-4b4a-ba31-076cf3bfee35 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.932341] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ede85b0-42d0-4c4e-aa60-c00d8bb322c0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.939095] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0111d95b-6417-454a-8d60-b874f2f7e95c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.951733] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 982.454533] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 982.455778] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 982.455953] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.451744] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.452190] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.452190] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 984.452279] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the list of instances to heal {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 984.955592] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Didn't find any instances for network info cache update. {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 984.955837] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.955969] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.956128] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.956275] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.956415] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1041.301430] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1041.805683] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1041.805867] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1042.300551] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1042.300801] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1042.804448] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.804815] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.804856] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.805027] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1042.805953] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abe374c5-18e6-4d30-99a5-61ff0904421a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.813922] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caf918b5-7d44-459a-9f71-2246787dcd91 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.827415] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb0bf3b-db52-41fc-8217-dadcceede806 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.833429] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c414723e-d019-477d-b9dc-c607dff929b2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.861076] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: 
name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181268MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1042.861207] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.861387] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.880236] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1043.880469] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1043.894592] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6255f0d-6c62-44ab-b6d7-df73f5dad20f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.901939] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f40f4d-3abf-404b-a3e6-2b7261b09336 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.931720] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4af776ee-433d-4295-8c39-2414dd5c4edf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.939347] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96134431-1d35-4ac9-9aa7-d8e4b4ebd25e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.952081] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1044.454720] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1044.455994] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1044.456189] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.595s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1045.456898] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1045.457268] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1045.457268] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the list of instances to heal {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1045.961104] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Didn't find any instances for network info cache update. {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1045.961343] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1045.961491] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1045.961635] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1045.961781] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1046.301412] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.296146] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.300755] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.804109] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1102.804358] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1102.804511] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1102.804664] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1102.805547] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a7d92b-46bb-44b9-be21-52f68f62fdec {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.813641] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af0f55ff-dbb1-49c0-bf97-e64ea70d451a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.827435] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c2a2d9f-8a2e-4ea6-9aec-06030fcf1b69 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.833728] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-983e7053-4bc6-4d11-a887-570f53bf0e54 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.861596] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181284MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1102.861746] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1102.861909] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1103.880765] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1103.881027] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1103.895472] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9958510b-d73f-4085-98c5-f3e30c7ebf76 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.902896] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46e2dc90-e5b7-4f04-8b49-206c16e7e717 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.932550] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-776331bc-a2db-4d92-806b-790d4b926796 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.939768] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b70f4c-ebbc-48ce-a2c2-403430e0eb00 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.952273] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1104.455532] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1104.456814] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1104.456996] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.595s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.457638] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1105.457981] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1105.457981] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the list of instances to heal {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1105.960878] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Didn't find any instances for network info cache update. {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1105.961145] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1105.961287] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1106.300867] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1106.301097] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1106.301251] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1106.301400] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1108.300586] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1158.301464] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62684) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1162.800031] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1164.300755] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1164.301171] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1164.301171] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1164.301321] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the list of instances to heal {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1164.804317] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Didn't find any instances for network info cache update. {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1164.804587] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1165.307774] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1165.308207] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1165.308207] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1165.308312] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1165.309232] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-918baf57-72f4-44d6-9e2d-9c67e1390dbc 
{{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.317850] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d766f71-3f00-4e3c-976e-d5749ff99e01 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.331489] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23a206b1-6a27-4101-8c8a-6326250d985c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.337544] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cf9cfc1-8944-4850-be8f-8ea0d1c635cd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.364983] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181270MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1165.365117] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1165.365299] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.384418] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1166.384704] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1166.398983] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba1c7742-ef49-43a4-b983-b1ea4fcf6c31 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.406727] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed1e5d2-9a47-4f97-ad72-96897d3d9593 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.436642] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b692958-535c-4a81-838d-afac30b3ecd8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.444254] env[62684]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7d59e3b-8cb6-42d1-84da-3aae54fb89f6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.457069] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1166.960052] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1166.961327] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1166.961710] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.596s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1166.961710] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1166.961874] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Cleaning up deleted instances with incomplete migration {{(pid=62684) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 1167.962485] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1167.962860] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1167.962860] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1167.963060] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1168.302247] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1168.302488] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1169.300806] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1170.301585] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1170.302054] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Cleaning up deleted instances {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 1170.805414] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] There are 0 instances to clean {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 1212.482668] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1212.988396] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Getting list of instances from cluster (obj){ [ 1212.988396] env[62684]: value = "domain-c8" [ 1212.988396] env[62684]: _type = "ClusterComputeResource" [ 1212.988396] env[62684]: } {{(pid=62684) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1212.989502] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5219198-d0bd-4e81-9c97-dc05b8db963e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.998478] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Got total of 0 instances {{(pid=62684) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1225.300819] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1225.301234] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1225.301234] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the 
list of instances to heal {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1225.805183] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Didn't find any instances for network info cache update. {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1225.805425] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1226.308228] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1226.308627] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1226.308627] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1226.308790] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1226.309657] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26ec3916-3204-47a9-a690-dcc748426bb7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.317902] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2cdd995-8aec-4f8d-988a-27ab746f354c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.331353] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ee477b-b464-485e-9227-375cf45aa89f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.337131] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f460f6-8878-4289-b863-6f13db0bab21 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.364322] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181268MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1226.364454] env[62684]: DEBUG 
oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1226.364628] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.491591] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1227.491890] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1227.507918] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1227.520551] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1227.520753] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1227.532591] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1227.549551] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: 
COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1227.562828] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d468874-4453-4d8d-90a2-6b686d4c2c43 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.570583] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-320e5282-36ba-4521-8938-106e93d09933 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.601391] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ea1f3ac-8b46-44c0-b42d-bf732322f44e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.609171] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e030760d-5a79-46be-833c-d38ca2c42be0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.622684] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1228.125755] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1228.127153] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1228.127342] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.763s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1228.623390] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1228.623854] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1228.623854] 
env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1228.623964] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1228.624082] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1229.301979] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1230.301514] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1230.301842] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1284.298054] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1285.300582] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1285.300829] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1285.300960] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the list of instances to heal {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1285.803907] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Didn't find any instances for network info cache update. 
{{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1287.300488] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1287.300870] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1287.804423] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1287.804655] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1287.805034] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1287.805034] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1287.805934] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eee52cb-1461-4c7e-940f-01fcff8a2d4c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.814554] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f07def8c-7617-4a71-a459-9e70ba8e417f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.829024] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c40b8c97-5189-41ea-ab4f-8292426ed352 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.836217] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01c2fbb4-a432-473c-bb84-e1adb95c2a24 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.865409] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181267MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1287.865648] env[62684]: DEBUG 
oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1287.865764] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.884515] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1288.884935] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1288.899087] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc37bd5b-189b-46c9-a035-eabc2b9f698d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.906758] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d43fd4b-4326-41c3-9f37-11b6061478b8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.937534] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38bf449c-cbb2-4306-a9f0-7adfaec44475 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.945431] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9f22471-49cf-4c7e-9377-558e803374cd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.958757] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1289.461776] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1289.462990] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1289.463207] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.597s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1290.463833] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1290.464166] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1290.464272] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1290.464367] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1290.464505] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1291.301883] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1291.302198] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1347.296632] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1347.300192] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1347.300332] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1347.300452] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the list of instances to heal {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1347.803865] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Didn't find any instances for network info cache update. 
{{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1347.804586] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1348.307481] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1348.307868] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1348.307868] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.308018] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1348.308905] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7092ab-3c89-4bd2-977c-f24ab54aa4f4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.317280] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a52f67e-5114-419b-a9e5-30cbac579246 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.332468] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9fca759-47cf-4ed4-869a-027a85715790 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.339141] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2218454c-0543-4b6f-8091-bde10a740265 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.367970] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181256MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1348.368204] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1348.368336] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1349.386747] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1349.386977] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1349.401174] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2031b6f8-4d87-4e28-97ee-9a173664773e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.408666] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ebab39a-00a5-4edd-b6d8-539128e7f13a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.438305] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf63ccf-833a-44f8-8139-0b321976b6b9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.444935] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d3d4d25-48b7-4087-985d-731b8e640768 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.457342] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1349.960474] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1349.961793] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1349.961967] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1350.458688] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1350.459257] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1350.459257] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1351.302521] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1352.302756] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1352.302756] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1353.301982] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1407.296328] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1407.296689] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1408.300540] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1408.300927] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1408.300927] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the list of instances to heal {{(pid=62684) 
_heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1408.803759] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Didn't find any instances for network info cache update. {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1408.803998] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1409.307055] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1409.307378] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1409.307474] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1409.307593] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1409.308501] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-845723a5-c5a0-4c34-ba4c-82a2a156c08b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.317278] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cbbc537-d4ff-4244-84ad-f92879eefa5e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.330618] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbd66139-de13-4413-aae0-cd9aca54b58b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.336627] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee89582-97d9-4317-9ec5-f11194e67a21 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.364910] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181266MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1409.365066] env[62684]: DEBUG oslo_concurrency.lockutils [None 
req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1409.365235] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.382870] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1410.383128] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1410.396389] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53cb247-41e9-402b-a655-ec93bcd39d30 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.403978] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb3b594b-3519-4744-b616-f697885bb049 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.432365] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57a6d0cc-8c9e-497e-85d4-a3ff5246629c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.439427] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91759708-bf8f-4f3f-a50d-8d3cd4f41037 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.452846] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1410.955790] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1410.957153] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 
{{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1410.957337] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1411.454112] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1411.454487] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1411.454487] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1411.454653] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1413.301254] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1413.301588] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1413.301632] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1468.296766] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1469.300479] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1469.300788] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1469.300788] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the list of instances to heal {{(pid=62684) _heal_instance_info_cache 
/opt/stack/nova/nova/compute/manager.py:9933}} [ 1469.803998] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Didn't find any instances for network info cache update. {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1469.804262] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1469.804444] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1470.307511] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.307953] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.307953] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1470.308137] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1470.308946] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-682d0e41-48d8-4540-84f5-adf157b978d6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.316974] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1edc2fa-30e8-4ea7-913e-e40bbcf3f28c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.330576] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad572b58-74f8-4730-b28e-2fbd5900fd14 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.336529] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d5a47e8-4b14-45c3-b7a0-1832f77753e6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.364866] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: 
name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181255MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1470.365080] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.365218] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.383361] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1471.383619] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1471.396956] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f07ffe-47ce-4f29-a39a-a172043a9cb1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.404517] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa34152-362f-4596-a756-32faab3de778 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.433914] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e01f119-e9b2-46d8-9de7-03451dd34d3d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.440928] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2388f52a-c1ce-4241-b98e-381778575cb5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.453451] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1471.956914] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1471.958194] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1471.958370] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.593s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.958586] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1472.957629] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1472.958037] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1473.300577] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1473.300808] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1473.300960] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1473.301142] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1473.301277] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Cleaning up deleted instances {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 1473.804877] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] There are 0 instances to clean {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 1474.805617] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1477.300980] 
env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1477.301356] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Cleaning up deleted instances with incomplete migration {{(pid=62684) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 1528.800132] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1529.304713] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1529.304870] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1529.304970] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the list of instances to heal {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1529.809247] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Didn't find any instances for network info cache update. 
{{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1529.809610] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1530.301506] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1530.301753] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1530.805163] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.805436] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.805574] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.805735] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1530.806702] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a18141da-c3a2-4f07-b754-fa1412ae8035 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.814988] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-675e62a1-3896-4bb4-a5ae-aaf40c9e1756 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.828448] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1494b79-a7a1-4b8e-ae44-15dd59761510 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.834344] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-199c127b-4e15-45ea-97f8-4d6017a32179 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.862465] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None 
None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181238MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1530.862602] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.862777] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1531.896352] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1531.896622] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1531.912113] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1531.922872] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1531.923067] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1531.932495] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1531.947477] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1531.959377] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a746ac-2d51-4ee8-8671-0af38223d3f4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.966916] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfa95705-20cf-4830-81c7-bc44fbc0b66f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.995583] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f12cad0-bbb3-4c41-b4bf-1f8dcef804f6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.002489] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e823771-7b4f-40c3-839e-bedc9c1f76d7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.015257] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1532.518097] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1532.519347] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1532.519528] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.657s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1533.518298] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1533.518645] env[62684]: DEBUG 
nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1534.300877] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1534.301110] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1535.300598] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1535.300902] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1589.301449] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1590.301368] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1590.301689] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1590.302139] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the list of instances to heal {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1590.804879] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Didn't find any instances for network info cache update. 
{{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1590.805179] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1591.311552] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1591.311839] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.312012] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.312168] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1591.313055] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd78c82e-dbc5-49a4-a548-e7c6dec7a91e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.321915] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8071b5d-1d92-4cbd-bf22-bd9719b16691 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.336123] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5abd2638-cbc1-4e1a-a37a-7187b5d4d292 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.342148] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f20f8692-b314-4874-beca-7726cb67c0e8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.370359] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181243MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1591.370510] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1591.370673] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1592.389442] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1592.389736] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1592.402502] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c057a207-f225-4846-bb73-19a3a4d5df6c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.409650] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62230c82-7222-4495-9082-039b2b04092e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.438362] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85771c08-95da-43a3-961e-12ea9c242930 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.444971] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad0e4c9-36f4-44ae-9011-f73db4fcdccc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.457467] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1592.960760] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1592.962053] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1592.962238] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1593.957727] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1593.958113] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1593.958113] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1596.300939] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1596.301459] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1596.301459] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1597.300992] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1650.297656] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1650.802972] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1651.302055] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1651.302055] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1651.302055] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the list of instances to heal {{(pid=62684) 
_heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1651.805137] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Didn't find any instances for network info cache update. {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1652.300961] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1652.805471] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1652.805941] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1652.806165] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1652.806438] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1652.807762] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0dadfff-c3da-4c55-9d93-03ac2a27547f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.817572] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8df299a-8a15-42d3-b2df-bc7d5aa0f826 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.837123] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4440f733-aa49-4e2c-855b-30a0439ea1c7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.843482] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f6a7f4-0dc5-4b76-af9e-9f8c9d4b9ef9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.888741] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181219MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1652.888741] env[62684]: DEBUG oslo_concurrency.lockutils [None 
req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1652.888741] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1653.905883] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1653.906217] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1653.918921] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef6d8bcb-e528-4255-ae30-fd26f6b39254 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.926507] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-309d6ac9-cdaa-4dbc-b24a-176975aa56eb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.955924] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a7978fd-d3e5-4d46-ba1d-d10401ef95d5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.962988] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5facbf3-c98c-4df9-b6da-c8fb7d48c202 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.976154] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1654.479023] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1654.480293] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 
{{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1654.480471] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.593s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1655.476196] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1655.476516] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1655.476610] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1658.301433] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1658.301838] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1658.301951] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1658.302161] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1703.953286] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Acquiring lock "e4528a29-163d-4f5e-9497-6e6b90b290ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1703.953605] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Lock "e4528a29-163d-4f5e-9497-6e6b90b290ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1704.457125] env[62684]: DEBUG nova.compute.manager [None 
req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1704.712409] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Acquiring lock "0f9a525c-09b9-483e-b418-fea6e6e5dc4a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1704.712988] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Lock "0f9a525c-09b9-483e-b418-fea6e6e5dc4a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1704.999379] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1704.999696] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1705.002089] env[62684]: INFO nova.compute.claims [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1705.215868] env[62684]: DEBUG nova.compute.manager [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1705.747465] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1705.886266] env[62684]: DEBUG oslo_concurrency.lockutils [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Acquiring lock "c1580c72-9345-436e-b4f7-56d319248864" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1705.886266] env[62684]: DEBUG oslo_concurrency.lockutils [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Lock "c1580c72-9345-436e-b4f7-56d319248864" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1706.089411] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024580f9-2277-4109-a6bf-5e9ca08b56cb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.100138] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3c63715-b135-4ef1-9ca4-37c8add935a4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.131982] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c14a92-0245-4858-9dd7-d6f091164480 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.139592] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a078cf3d-12f4-4b1b-ad14-ba0df535ad66 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.158052] env[62684]: DEBUG nova.compute.provider_tree [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1706.389823] env[62684]: DEBUG nova.compute.manager [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1706.666314] env[62684]: DEBUG nova.scheduler.client.report [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1706.927341] env[62684]: DEBUG oslo_concurrency.lockutils [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1706.972408] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Acquiring lock "4cbcfa1a-c034-4de7-ad25-4ad22316067e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1706.972754] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Lock "4cbcfa1a-c034-4de7-ad25-4ad22316067e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1707.172370] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.173s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1707.173016] env[62684]: DEBUG nova.compute.manager [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1707.176458] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.429s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1707.178143] env[62684]: INFO nova.compute.claims [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1707.346313] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "8d53d8c3-6db8-4ebe-a35f-0f64602fafcb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1707.346800] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "8d53d8c3-6db8-4ebe-a35f-0f64602fafcb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1707.476457] env[62684]: DEBUG nova.compute.manager [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1707.683715] env[62684]: DEBUG nova.compute.utils [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1707.690034] env[62684]: DEBUG nova.compute.manager [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1707.690034] env[62684]: DEBUG nova.network.neutron [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1707.854767] env[62684]: DEBUG nova.compute.manager [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1708.019112] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1708.197196] env[62684]: DEBUG nova.compute.manager [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1708.281429] env[62684]: DEBUG nova.policy [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3bda315fe1964056a2d35cb30759f825', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ae9095270e954407a67df2328e6f76ac', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1708.384900] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1708.399544] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a29c71-c5ed-442a-af9b-5e1dd1d9ed06 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.408848] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-283166a5-7106-45ed-870b-6f5dee58b295 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.422577] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "0676806b-c1f0-4c1a-a12d-add2edf1588f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1708.422577] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "0676806b-c1f0-4c1a-a12d-add2edf1588f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1708.455126] 
env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baffe84a-0042-4295-9892-e1a4140fe57a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.465832] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c8b700-725a-47b1-8d44-4bdf55927120 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.483509] env[62684]: DEBUG nova.compute.provider_tree [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1708.505749] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Acquiring lock "91869c00-edd0-40a8-84df-d8842d750558" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1708.506018] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Lock "91869c00-edd0-40a8-84df-d8842d750558" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1708.843612] env[62684]: DEBUG nova.network.neutron [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Successfully created port: 0e21d0c2-c9f1-4885-ba7f-b8e64973c91f {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1708.927371] env[62684]: DEBUG nova.compute.manager [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1708.991058] env[62684]: DEBUG nova.scheduler.client.report [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1709.015350] env[62684]: DEBUG nova.compute.manager [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1709.213318] env[62684]: DEBUG nova.compute.manager [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1709.241151] env[62684]: DEBUG nova.virt.hardware [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1709.241479] env[62684]: DEBUG nova.virt.hardware [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1709.241567] env[62684]: DEBUG nova.virt.hardware [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1709.241727] env[62684]: DEBUG nova.virt.hardware [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Flavor pref 
0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1709.241871] env[62684]: DEBUG nova.virt.hardware [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1709.242844] env[62684]: DEBUG nova.virt.hardware [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1709.243500] env[62684]: DEBUG nova.virt.hardware [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1709.244024] env[62684]: DEBUG nova.virt.hardware [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1709.244024] env[62684]: DEBUG nova.virt.hardware [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1709.244310] env[62684]: DEBUG nova.virt.hardware [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1709.244542] env[62684]: DEBUG nova.virt.hardware [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1709.246125] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21124ad3-09da-44f3-92e2-2bd35e84f8f1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.256801] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67ff471f-899f-4e8c-8767-bfca44b9307c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.285022] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bd3df7f-a577-4fe1-987e-6413557615d2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.464861] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8b512e01-c137-400b-827b-a87a3eaa8394 
tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1709.501972] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.325s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1709.502247] env[62684]: DEBUG nova.compute.manager [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1709.507029] env[62684]: DEBUG oslo_concurrency.lockutils [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.578s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.507029] env[62684]: INFO nova.compute.claims [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1709.543714] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1710.014233] env[62684]: DEBUG nova.compute.utils [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1710.022464] env[62684]: DEBUG nova.compute.manager [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1710.022464] env[62684]: DEBUG nova.network.neutron [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1710.127239] env[62684]: DEBUG nova.policy [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd9599177031e45839cd838f98894f572', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c3a7760a18d54bc4b8b4fd291e127381', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1710.301329] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1710.531822] env[62684]: DEBUG nova.compute.manager [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1710.677360] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c88e7b64-c362-4bce-b882-0737a6d5fc85 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.687171] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3902dc9-d934-482d-b90b-033326cb1fef {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.727885] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-908f850d-a519-4fae-94e6-134018dc1ad4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.738230] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30c0d7a0-1f30-4773-84c1-918cfefe2b2f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.758082] env[62684]: DEBUG nova.compute.provider_tree [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1710.835032] env[62684]: DEBUG nova.network.neutron [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Successfully created port: 5868c4da-5351-4d35-8886-12ba976894db {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1711.032750] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Acquiring lock "effc673a-103f-413b-88ac-6907ad1ee852" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1711.032986] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Lock "effc673a-103f-413b-88ac-6907ad1ee852" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1711.264464] env[62684]: DEBUG nova.scheduler.client.report [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1711.536234] env[62684]: DEBUG nova.compute.manager [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1711.542677] env[62684]: DEBUG nova.compute.manager [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1711.581958] env[62684]: DEBUG nova.virt.hardware [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1711.581958] env[62684]: DEBUG nova.virt.hardware [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1711.581958] env[62684]: DEBUG nova.virt.hardware [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1711.583359] env[62684]: DEBUG nova.virt.hardware [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1711.583359] env[62684]: DEBUG nova.virt.hardware [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1711.583359] env[62684]: DEBUG nova.virt.hardware [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1711.583359] env[62684]: DEBUG 
nova.virt.hardware [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1711.583359] env[62684]: DEBUG nova.virt.hardware [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1711.583540] env[62684]: DEBUG nova.virt.hardware [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1711.583540] env[62684]: DEBUG nova.virt.hardware [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1711.583598] env[62684]: DEBUG nova.virt.hardware [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1711.587760] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-006873c5-ef0f-4e33-82d2-66eebffeb641 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.596805] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-033dbcce-ab36-45d2-b0da-2513a472eae4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.778255] env[62684]: DEBUG oslo_concurrency.lockutils [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.273s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1711.779740] env[62684]: DEBUG nova.compute.manager [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1711.785087] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.766s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1711.786612] env[62684]: INFO nova.compute.claims [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1711.794986] env[62684]: DEBUG nova.network.neutron [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Successfully updated port: 0e21d0c2-c9f1-4885-ba7f-b8e64973c91f {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1712.072459] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1712.292278] env[62684]: DEBUG nova.compute.utils [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1712.293728] env[62684]: DEBUG nova.compute.manager [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1712.294076] env[62684]: DEBUG nova.network.neutron [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1712.301855] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1712.301855] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1712.301855] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the list of instances to heal {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1712.301855] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Acquiring lock "refresh_cache-e4528a29-163d-4f5e-9497-6e6b90b290ba" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1712.301855] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Acquired lock "refresh_cache-e4528a29-163d-4f5e-9497-6e6b90b290ba" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1712.301855] env[62684]: DEBUG nova.network.neutron [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1712.370617] env[62684]: DEBUG nova.policy [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '97c98e6c804749c2bbff24c950077904', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '031ad4f1e3014ebfa6eccf32481585f2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1712.714537] env[62684]: DEBUG nova.network.neutron [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Successfully created port: c089de91-ca6e-40fe-8783-5b1644292445 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1712.800639] env[62684]: DEBUG 
nova.compute.manager [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1712.812246] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Skipping network cache update for instance because it is Building. {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1712.812246] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Skipping network cache update for instance because it is Building. {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1712.812246] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: c1580c72-9345-436e-b4f7-56d319248864] Skipping network cache update for instance because it is Building. {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1712.812246] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Skipping network cache update for instance because it is Building. {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1712.812246] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Didn't find any instances for network info cache update. {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1712.818308] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1712.932204] env[62684]: DEBUG nova.network.neutron [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1713.013211] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7805ff6-db1e-4ab5-945b-40cc9513d360 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.032048] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-393af70e-9ad5-4cf8-a486-1ae39cbc66a4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.069208] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5cd7a50-ec14-4c01-ab74-a7e3f787a754 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.077916] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a0daa5c-c07f-4562-af73-7ba6919d0d2b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.092783] env[62684]: DEBUG nova.compute.provider_tree [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1713.320511] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1713.597030] env[62684]: DEBUG nova.scheduler.client.report [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1713.637607] env[62684]: DEBUG nova.network.neutron [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Updating instance_info_cache with network_info: [{"id": "0e21d0c2-c9f1-4885-ba7f-b8e64973c91f", "address": "fa:16:3e:49:a8:66", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": 
"060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e21d0c2-c9", "ovs_interfaceid": "0e21d0c2-c9f1-4885-ba7f-b8e64973c91f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1713.751059] env[62684]: DEBUG nova.network.neutron [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Successfully updated port: 5868c4da-5351-4d35-8886-12ba976894db {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1713.768159] env[62684]: DEBUG nova.compute.manager [req-16ef0e2c-7d98-46bd-8f77-ed94a98c90a3 req-af36725a-3d05-48ee-b1f2-2b824c0d2bb5 service nova] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Received event network-vif-plugged-0e21d0c2-c9f1-4885-ba7f-b8e64973c91f {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1713.768264] env[62684]: DEBUG oslo_concurrency.lockutils [req-16ef0e2c-7d98-46bd-8f77-ed94a98c90a3 req-af36725a-3d05-48ee-b1f2-2b824c0d2bb5 service nova] Acquiring lock "e4528a29-163d-4f5e-9497-6e6b90b290ba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1713.768455] env[62684]: DEBUG oslo_concurrency.lockutils [req-16ef0e2c-7d98-46bd-8f77-ed94a98c90a3 req-af36725a-3d05-48ee-b1f2-2b824c0d2bb5 service nova] Lock "e4528a29-163d-4f5e-9497-6e6b90b290ba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1713.768607] env[62684]: DEBUG oslo_concurrency.lockutils [req-16ef0e2c-7d98-46bd-8f77-ed94a98c90a3 req-af36725a-3d05-48ee-b1f2-2b824c0d2bb5 service nova] Lock "e4528a29-163d-4f5e-9497-6e6b90b290ba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1713.768772] env[62684]: DEBUG nova.compute.manager [req-16ef0e2c-7d98-46bd-8f77-ed94a98c90a3 req-af36725a-3d05-48ee-b1f2-2b824c0d2bb5 service nova] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] No waiting events found dispatching network-vif-plugged-0e21d0c2-c9f1-4885-ba7f-b8e64973c91f {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1713.768933] env[62684]: WARNING nova.compute.manager [req-16ef0e2c-7d98-46bd-8f77-ed94a98c90a3 req-af36725a-3d05-48ee-b1f2-2b824c0d2bb5 service nova] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Received unexpected event network-vif-plugged-0e21d0c2-c9f1-4885-ba7f-b8e64973c91f for instance with vm_state building and task_state spawning. 
[ 1713.819159] env[62684]: DEBUG nova.compute.manager [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1713.849878] env[62684]: DEBUG nova.virt.hardware [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1713.849878] env[62684]: DEBUG nova.virt.hardware [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1713.850055] env[62684]: DEBUG nova.virt.hardware [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1713.850990] env[62684]: DEBUG nova.virt.hardware [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1713.850990] env[62684]: DEBUG nova.virt.hardware [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1713.851167] env[62684]: DEBUG nova.virt.hardware [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1713.851796] env[62684]: DEBUG nova.virt.hardware [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1713.855522] env[62684]: DEBUG nova.virt.hardware [None req-033f3890-1fa7-43f0-a377-30046bc89af7 
tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1713.855522] env[62684]: DEBUG nova.virt.hardware [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1713.855522] env[62684]: DEBUG nova.virt.hardware [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1713.855522] env[62684]: DEBUG nova.virt.hardware [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1713.855522] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e848dc9-af6d-4654-9c73-624f007d3ec1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.864027] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7dcb6cc-4286-42d1-980a-c3bd191b38fd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.104804] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.318s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.104804] env[62684]: DEBUG nova.compute.manager [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Start building networks asynchronously for instance. 
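Editor's note: the nova.virt.hardware entries above report no explicit CPU topology limits from flavor or image (logged as 0:0:0), so the defaults of 65536 sockets/cores/threads apply and the only valid topology for a single vCPU is 1 socket x 1 core x 1 thread. An illustrative enumeration of such candidates (not Nova's actual implementation):

    # Enumerate CPU topologies whose product equals the vCPU count, mirroring
    # the "Build topologies for 1 vcpu(s) 1:1:1" / "Got 1 possible topologies"
    # entries above.
    from collections import namedtuple

    Topology = namedtuple("Topology", "sockets cores threads")


    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        topologies = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        topologies.append(Topology(s, c, t))
        return topologies


    print(possible_topologies(1))   # [Topology(sockets=1, cores=1, threads=1)]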
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1714.106741] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.722s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.108511] env[62684]: INFO nova.compute.claims [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1714.140318] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Releasing lock "refresh_cache-e4528a29-163d-4f5e-9497-6e6b90b290ba" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1714.140413] env[62684]: DEBUG nova.compute.manager [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Instance network_info: |[{"id": "0e21d0c2-c9f1-4885-ba7f-b8e64973c91f", "address": "fa:16:3e:49:a8:66", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e21d0c2-c9", "ovs_interfaceid": "0e21d0c2-c9f1-4885-ba7f-b8e64973c91f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1714.142398] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:a8:66', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ded18042-834c-4792-b3e8-b1c377446432', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0e21d0c2-c9f1-4885-ba7f-b8e64973c91f', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1714.162960] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 
tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1714.163015] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dfe9e696-8209-4177-91a7-28d60ae6b40d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.180333] env[62684]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1714.180333] env[62684]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62684) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1714.180333] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Folder already exists: OpenStack. Parent ref: group-v4. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1714.180333] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Creating folder: Project (ae9095270e954407a67df2328e6f76ac). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1714.180333] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7c1290d5-0748-4db5-b82b-58ecda5c79e6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.191479] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Created folder: Project (ae9095270e954407a67df2328e6f76ac) in parent group-v421118. [ 1714.191679] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Creating folder: Instances. Parent ref: group-v421122. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1714.191907] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8d6ae40d-2726-485f-9472-5e5f39b7705a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.200504] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Created folder: Instances in parent group-v421122. [ 1714.200778] env[62684]: DEBUG oslo.service.loopingcall [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
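Editor's note: the Folder.CreateFolder call above hits a DuplicateName fault (which suds surfaces as an "internal server error" despite HTTP 200), and the driver simply treats it as "folder already exists" and moves on. A sketch of that idempotent pattern, assuming an oslo.vmware VMwareAPISession and that the fault surfaces as oslo_vmware.exceptions.DuplicateName:

    # Idempotent folder creation against vCenter; DuplicateName handling
    # mirrors the "Folder already exists: OpenStack" entry above.
    from oslo_vmware import exceptions as vexc


    def create_folder_if_missing(session, parent_folder_ref, name):
        try:
            return session.invoke_api(session.vim, 'CreateFolder',
                                      parent_folder_ref, name=name)
        except vexc.DuplicateName:
            # The folder is already there; treat creation as a no-op and let
            # the caller look up the existing reference instead.
            return None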
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1714.200966] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1714.201187] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ce3aff78-eacf-4ada-a345-2a031e12e9d8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.220623] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1714.220623] env[62684]: value = "task-2052270" [ 1714.220623] env[62684]: _type = "Task" [ 1714.220623] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.230167] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052270, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.260999] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Acquiring lock "refresh_cache-0f9a525c-09b9-483e-b418-fea6e6e5dc4a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1714.261176] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Acquired lock "refresh_cache-0f9a525c-09b9-483e-b418-fea6e6e5dc4a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1714.261287] env[62684]: DEBUG nova.network.neutron [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1714.618021] env[62684]: DEBUG nova.compute.utils [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1714.621801] env[62684]: DEBUG nova.compute.manager [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1714.621983] env[62684]: DEBUG nova.network.neutron [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1714.734312] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052270, 'name': CreateVM_Task, 'duration_secs': 0.445908} completed successfully. 
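Editor's note: CreateVM_Task above is submitted, polled at 0% and reported complete about half a second later; oslo.vmware hides that loop behind wait_for_task. A stripped-down sketch of the same polling, assuming an oslo.vmware session (real code should just call session.wait_for_task(task_ref)):

    import time

    from oslo_vmware import vim_util


    def poll_task(session, task_ref, interval=0.5):
        # Poll the task's 'info' property until it succeeds or errors out.
        while True:
            info = session.invoke_api(vim_util, 'get_object_property',
                                      session.vim, task_ref, 'info')
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                raise RuntimeError(
                    getattr(info.error, 'localizedMessage', 'task failed'))
            time.sleep(interval)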
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1714.734312] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1714.754856] env[62684]: DEBUG oslo_vmware.service [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea6b1568-fd20-4e92-ae68-c0200ec8b833 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.765033] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1714.765033] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1714.765033] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1714.770874] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c8cccac-1275-41f7-a310-5688ca3f8dc4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.776414] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Waiting for the task: (returnval){ [ 1714.776414] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d0ed3e-b7f4-a485-1e11-006e4325bd26" [ 1714.776414] env[62684]: _type = "Task" [ 1714.776414] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.789715] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d0ed3e-b7f4-a485-1e11-006e4325bd26, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.870218] env[62684]: DEBUG nova.policy [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1fba7614b20e4e6280af728ba1dc6fe3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bcd3c908bccd421292836d1cde1fc5e3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1714.891255] env[62684]: DEBUG nova.network.neutron [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1715.123376] env[62684]: DEBUG nova.compute.manager [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1715.291892] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1715.293799] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1715.293799] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1715.293799] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1715.293799] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 
tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1715.293799] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fa81a5cd-b282-4112-8a77-9bb99a65534a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.302705] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1715.302893] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1715.305279] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c75e518f-8ae8-4539-8f4b-2ecb22943463 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.311474] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b35b83-fb06-49a5-b1d9-9b37d53fc863 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.325370] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef3b1677-04e6-4573-be1b-85b66c3ebfa1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.330727] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cc9b1a7-126f-4dc3-add1-c37cb73deae2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.335969] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Waiting for the task: (returnval){ [ 1715.335969] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5285a357-ae47-e20e-bb05-4459dba818fe" [ 1715.335969] env[62684]: _type = "Task" [ 1715.335969] env[62684]: } to complete. 
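Editor's note: the image-cache handling above takes a lock on the cached VMDK path under devstack-image-cache_base, searches the datastore for it, and only prepares a fetch location when it is missing. A rough sketch of that fetch-if-missing flow; the helper functions are hypothetical placeholders for the SearchDatastore_Task check, the HTTP write handle and the CopyVirtualDisk_Task cache copy seen in the log:

    import uuid

    from oslo_concurrency import lockutils


    def datastore_file_exists(session, path):      # hypothetical helper
        return False


    def download_image(session, image_id, path):   # hypothetical helper
        pass


    def cache_image(session, src_path, dst_path):  # hypothetical helper
        pass


    def fetch_image_if_missing(session, image_id, cache_path):
        # Serialize on the cached VMDK path so concurrent spawns of the same
        # image download it only once.
        with lockutils.lock(cache_path):
            if datastore_file_exists(session, cache_path):
                return cache_path
            tmp_path = 'vmware_temp/%s/%s/tmp-sparse.vmdk' % (uuid.uuid4(),
                                                              image_id)
            download_image(session, image_id, tmp_path)
            cache_image(session, tmp_path, cache_path)
            return cache_path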
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.370155] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d4e0873-fd5a-43a0-83d8-d35fc8372bb9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.378332] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Preparing fetch location {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1715.378332] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Creating directory with path [datastore1] vmware_temp/ff316d5b-e35f-4994-9f0f-9379945dfcc5/3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1715.378332] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-80db6e24-813d-488e-9d60-6cf4c6d64cc4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.384409] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b899e483-5477-45f4-bf6e-68009249483c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.399786] env[62684]: DEBUG nova.compute.provider_tree [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1715.415507] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Created directory with path [datastore1] vmware_temp/ff316d5b-e35f-4994-9f0f-9379945dfcc5/3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1715.415765] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Fetch image to [datastore1] vmware_temp/ff316d5b-e35f-4994-9f0f-9379945dfcc5/3931321c-cb4c-4b87-8d3a-50e05ea01db2/tmp-sparse.vmdk {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1715.415960] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 
tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Downloading image file data 3931321c-cb4c-4b87-8d3a-50e05ea01db2 to [datastore1] vmware_temp/ff316d5b-e35f-4994-9f0f-9379945dfcc5/3931321c-cb4c-4b87-8d3a-50e05ea01db2/tmp-sparse.vmdk on the data store datastore1 {{(pid=62684) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1715.419575] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f974725-a1c6-4db7-8e03-20e482283e15 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.426824] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4bc6e19-9304-4fbc-b939-d1c339844d1d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.443657] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-001a9946-7023-4518-89ab-50e1d768f1c3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.491960] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7178a06a-c5b5-45c7-8922-a5f944a81437 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.498868] env[62684]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9e759a03-cdf7-4e84-9abd-1ff2a1a3cf24 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.591380] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Downloading image file data 3931321c-cb4c-4b87-8d3a-50e05ea01db2 to the data store datastore1 {{(pid=62684) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1715.711104] env[62684]: DEBUG oslo_vmware.rw_handles [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ff316d5b-e35f-4994-9f0f-9379945dfcc5/3931321c-cb4c-4b87-8d3a-50e05ea01db2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62684) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1715.927828] env[62684]: ERROR nova.scheduler.client.report [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [req-d9149667-bafa-4c78-9b6b-f9211d20b212] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d9149667-bafa-4c78-9b6b-f9211d20b212"}]} [ 1715.947171] env[62684]: DEBUG nova.network.neutron [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Updating instance_info_cache with network_info: [{"id": "5868c4da-5351-4d35-8886-12ba976894db", "address": "fa:16:3e:26:ba:29", "network": {"id": "ad517579-bdcb-4ccc-8e16-74f3524aa5f6", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1045737469-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3a7760a18d54bc4b8b4fd291e127381", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5868c4da-53", "ovs_interfaceid": "5868c4da-5351-4d35-8886-12ba976894db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1715.966210] env[62684]: DEBUG nova.scheduler.client.report [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1715.998158] env[62684]: DEBUG nova.scheduler.client.report [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1715.998412] env[62684]: DEBUG nova.compute.provider_tree [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1716.029509] env[62684]: DEBUG nova.scheduler.client.report [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1716.062760] env[62684]: DEBUG nova.scheduler.client.report [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1716.160097] env[62684]: DEBUG nova.compute.manager [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1716.215894] env[62684]: DEBUG nova.virt.hardware [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1716.216161] env[62684]: DEBUG nova.virt.hardware [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1716.216313] env[62684]: DEBUG nova.virt.hardware [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1716.216494] env[62684]: DEBUG nova.virt.hardware [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1716.216631] env[62684]: DEBUG nova.virt.hardware [None 
req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1716.216770] env[62684]: DEBUG nova.virt.hardware [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1716.216970] env[62684]: DEBUG nova.virt.hardware [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1716.217139] env[62684]: DEBUG nova.virt.hardware [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1716.217299] env[62684]: DEBUG nova.virt.hardware [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1716.217476] env[62684]: DEBUG nova.virt.hardware [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1716.217654] env[62684]: DEBUG nova.virt.hardware [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1716.218577] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a7b37f1-2f81-4d13-a4b9-bd46583c1b65 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.232081] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58d26102-669a-4831-83f2-4f2dd1c5718d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.316548] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da0f3bf1-cc71-4e26-9179-17fe1f77c144 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.325251] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b402fb38-d3d1-4e50-817c-03a5f50a4b74 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.367312] 
env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6458538d-ea02-4f4d-8d42-5c4430dec71b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.381978] env[62684]: DEBUG oslo_vmware.rw_handles [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Completed reading data from the image iterator. {{(pid=62684) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1716.381978] env[62684]: DEBUG oslo_vmware.rw_handles [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ff316d5b-e35f-4994-9f0f-9379945dfcc5/3931321c-cb4c-4b87-8d3a-50e05ea01db2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62684) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1716.381978] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ee405d0-b782-44e6-b461-4f0751696eb4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.401071] env[62684]: DEBUG nova.compute.provider_tree [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1716.450407] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Releasing lock "refresh_cache-0f9a525c-09b9-483e-b418-fea6e6e5dc4a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1716.451069] env[62684]: DEBUG nova.compute.manager [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Instance network_info: |[{"id": "5868c4da-5351-4d35-8886-12ba976894db", "address": "fa:16:3e:26:ba:29", "network": {"id": "ad517579-bdcb-4ccc-8e16-74f3524aa5f6", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1045737469-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3a7760a18d54bc4b8b4fd291e127381", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": 
"l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5868c4da-53", "ovs_interfaceid": "5868c4da-5351-4d35-8886-12ba976894db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1716.451252] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:ba:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '111a2767-1b06-4fe5-852b-40c9b5a699fd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5868c4da-5351-4d35-8886-12ba976894db', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1716.461769] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Creating folder: Project (c3a7760a18d54bc4b8b4fd291e127381). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1716.462453] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-be499817-b408-4fba-86dc-a066371463d7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.473716] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Created folder: Project (c3a7760a18d54bc4b8b4fd291e127381) in parent group-v421118. [ 1716.473895] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Creating folder: Instances. Parent ref: group-v421125. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1716.474224] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5c934407-c060-4355-9ee4-ec5ddb28c318 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.482591] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Created folder: Instances in parent group-v421125. [ 1716.482874] env[62684]: DEBUG oslo.service.loopingcall [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1716.483024] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1716.484952] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5df02297-eb64-4231-81c9-39be86888120 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.503711] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1716.503711] env[62684]: value = "task-2052274" [ 1716.503711] env[62684]: _type = "Task" [ 1716.503711] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.521868] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052274, 'name': CreateVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.524565] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Downloaded image file data 3931321c-cb4c-4b87-8d3a-50e05ea01db2 to vmware_temp/ff316d5b-e35f-4994-9f0f-9379945dfcc5/3931321c-cb4c-4b87-8d3a-50e05ea01db2/tmp-sparse.vmdk on the data store datastore1 {{(pid=62684) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1716.530284] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Caching image {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1716.530284] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Copying Virtual Disk [datastore1] vmware_temp/ff316d5b-e35f-4994-9f0f-9379945dfcc5/3931321c-cb4c-4b87-8d3a-50e05ea01db2/tmp-sparse.vmdk to [datastore1] vmware_temp/ff316d5b-e35f-4994-9f0f-9379945dfcc5/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1716.530284] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cebea7a5-5e1c-4d40-a81c-c61738180c45 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.538548] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Waiting for the task: (returnval){ [ 1716.538548] env[62684]: value = "task-2052275" [ 1716.538548] env[62684]: _type = "Task" [ 1716.538548] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.552429] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Task: {'id': task-2052275, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.680498] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Acquiring lock "fb7f38a0-bcfa-4d96-bde3-20d6f1d70112" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.680730] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Lock "fb7f38a0-bcfa-4d96-bde3-20d6f1d70112" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.851665] env[62684]: DEBUG nova.network.neutron [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Successfully updated port: c089de91-ca6e-40fe-8783-5b1644292445 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1716.951284] env[62684]: ERROR nova.scheduler.client.report [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [req-a0371d45-867d-429c-9e2f-3145e5a13d6a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a0371d45-867d-429c-9e2f-3145e5a13d6a"}]} [ 1716.976737] env[62684]: DEBUG nova.scheduler.client.report [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1717.015179] env[62684]: DEBUG nova.scheduler.client.report [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1717.015179] env[62684]: DEBUG nova.compute.provider_tree [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1717.022962] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052274, 'name': CreateVM_Task, 'duration_secs': 0.372336} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.023170] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1717.023889] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1717.024269] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1717.024659] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1717.024881] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c23ca7b-f1bb-4aad-89d7-35e6944a219b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.029987] env[62684]: DEBUG oslo_vmware.api [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Waiting for the task: (returnval){ [ 1717.029987] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521e7f52-56c1-6cca-28bf-7cb0fd90c961" [ 1717.029987] env[62684]: _type = "Task" [ 1717.029987] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.038168] env[62684]: DEBUG oslo_vmware.api [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521e7f52-56c1-6cca-28bf-7cb0fd90c961, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.042346] env[62684]: DEBUG nova.scheduler.client.report [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1717.052629] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Task: {'id': task-2052275, 'name': CopyVirtualDisk_Task} progress is 100%. 
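Editor's note: the CopyVirtualDisk_Task above copies the freshly downloaded tmp-sparse.vmdk next to its image-cache entry and is polled through to 100%. A sketch of issuing that copy and waiting on it, assuming an oslo.vmware session and a datacenter reference:

    def cache_sparse_image(session, dc_ref, src_path, dst_path):
        # Copy the sparse VMDK to the cache path and block on the task,
        # as in the "Copying Virtual Disk ... CopyVirtualDisk_Task" entries.
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                                  sourceName=src_path, sourceDatacenter=dc_ref,
                                  destName=dst_path, destDatacenter=dc_ref)
        session.wait_for_task(task)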
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.075442] env[62684]: DEBUG nova.scheduler.client.report [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1717.188386] env[62684]: DEBUG nova.compute.manager [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1717.221717] env[62684]: DEBUG nova.network.neutron [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Successfully created port: 53627d03-ce5e-439d-9055-b5abf4b099ed {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1717.262151] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a770553f-7c01-4e65-a3ce-db90bb17e86e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.276316] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf2d0723-7cc6-43f4-bf16-61748dc6c3cc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.315155] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-184dcf5b-8795-4af7-bf94-6e3355eece27 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.322853] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4af1c760-21b2-4d95-9c1f-52d37c6fee60 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.339297] env[62684]: DEBUG nova.compute.provider_tree [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1717.351876] env[62684]: DEBUG oslo_concurrency.lockutils [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Acquiring lock "refresh_cache-c1580c72-9345-436e-b4f7-56d319248864" {{(pid=62684) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1717.352127] env[62684]: DEBUG oslo_concurrency.lockutils [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Acquired lock "refresh_cache-c1580c72-9345-436e-b4f7-56d319248864" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1717.352295] env[62684]: DEBUG nova.network.neutron [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1717.551411] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1717.551687] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1717.551886] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1717.555842] env[62684]: DEBUG nova.compute.manager [req-52f6dfd9-0d76-45ec-94c1-6b8484eda350 req-ca9ed196-3f12-4600-864d-e047dc70ed68 service nova] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Received event network-changed-0e21d0c2-c9f1-4885-ba7f-b8e64973c91f {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1717.556030] env[62684]: DEBUG nova.compute.manager [req-52f6dfd9-0d76-45ec-94c1-6b8484eda350 req-ca9ed196-3f12-4600-864d-e047dc70ed68 service nova] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Refreshing instance network info cache due to event network-changed-0e21d0c2-c9f1-4885-ba7f-b8e64973c91f. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1717.556246] env[62684]: DEBUG oslo_concurrency.lockutils [req-52f6dfd9-0d76-45ec-94c1-6b8484eda350 req-ca9ed196-3f12-4600-864d-e047dc70ed68 service nova] Acquiring lock "refresh_cache-e4528a29-163d-4f5e-9497-6e6b90b290ba" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1717.557239] env[62684]: DEBUG oslo_concurrency.lockutils [req-52f6dfd9-0d76-45ec-94c1-6b8484eda350 req-ca9ed196-3f12-4600-864d-e047dc70ed68 service nova] Acquired lock "refresh_cache-e4528a29-163d-4f5e-9497-6e6b90b290ba" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1717.557239] env[62684]: DEBUG nova.network.neutron [req-52f6dfd9-0d76-45ec-94c1-6b8484eda350 req-ca9ed196-3f12-4600-864d-e047dc70ed68 service nova] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Refreshing network info cache for port 0e21d0c2-c9f1-4885-ba7f-b8e64973c91f {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1717.564917] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Task: {'id': task-2052275, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.771798} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.564917] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Copied Virtual Disk [datastore1] vmware_temp/ff316d5b-e35f-4994-9f0f-9379945dfcc5/3931321c-cb4c-4b87-8d3a-50e05ea01db2/tmp-sparse.vmdk to [datastore1] vmware_temp/ff316d5b-e35f-4994-9f0f-9379945dfcc5/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1717.564917] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Deleting the datastore file [datastore1] vmware_temp/ff316d5b-e35f-4994-9f0f-9379945dfcc5/3931321c-cb4c-4b87-8d3a-50e05ea01db2/tmp-sparse.vmdk {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1717.564917] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-529bcad9-45d9-4625-ab61-1adea2b56c75 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.572644] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Waiting for the task: (returnval){ [ 1717.572644] env[62684]: value = "task-2052276" [ 1717.572644] env[62684]: _type = "Task" [ 1717.572644] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.582258] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Task: {'id': task-2052276, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.718893] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1717.886023] env[62684]: DEBUG nova.scheduler.client.report [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 13 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1717.886365] env[62684]: DEBUG nova.compute.provider_tree [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 13 to 14 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1717.886590] env[62684]: DEBUG nova.compute.provider_tree [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1718.010993] env[62684]: DEBUG nova.network.neutron [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1718.085089] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Task: {'id': task-2052276, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.058249} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.085436] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1718.085659] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Moving file from [datastore1] vmware_temp/ff316d5b-e35f-4994-9f0f-9379945dfcc5/3931321c-cb4c-4b87-8d3a-50e05ea01db2 to [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2. {{(pid=62684) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 1718.085963] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-8411d15f-cf48-421f-a09b-eec1c08bd36e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.094342] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Waiting for the task: (returnval){ [ 1718.094342] env[62684]: value = "task-2052277" [ 1718.094342] env[62684]: _type = "Task" [ 1718.094342] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.102513] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Task: {'id': task-2052277, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.397193] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.290s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1718.397802] env[62684]: DEBUG nova.compute.manager [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1718.400546] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.936s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1718.401975] env[62684]: INFO nova.compute.claims [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1718.607276] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Task: {'id': task-2052277, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.07073} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.607453] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] File moved {{(pid=62684) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 1718.607664] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Cleaning up location [datastore1] vmware_temp/ff316d5b-e35f-4994-9f0f-9379945dfcc5 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1718.608010] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Deleting the datastore file [datastore1] vmware_temp/ff316d5b-e35f-4994-9f0f-9379945dfcc5 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1718.608330] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-14f19776-872f-49e3-b64d-d6b3a33e2594 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.614363] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Waiting for the task: (returnval){ [ 1718.614363] env[62684]: value = "task-2052279" [ 1718.614363] env[62684]: _type = "Task" [ 1718.614363] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.622743] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Task: {'id': task-2052279, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.807174] env[62684]: DEBUG nova.network.neutron [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Updating instance_info_cache with network_info: [{"id": "c089de91-ca6e-40fe-8783-5b1644292445", "address": "fa:16:3e:de:7f:6c", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.66", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc089de91-ca", "ovs_interfaceid": "c089de91-ca6e-40fe-8783-5b1644292445", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1718.910632] env[62684]: DEBUG nova.compute.utils [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1718.918733] env[62684]: DEBUG nova.compute.manager [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1718.919042] env[62684]: DEBUG nova.network.neutron [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1719.122293] env[62684]: DEBUG nova.policy [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0c9327f7394249948899bf76e1837d36', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7855def9d0aa49abb7003ee504b9ccaf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1719.130976] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Task: {'id': task-2052279, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.023596} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.131374] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1719.132407] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e14e466-3fd9-415a-86c0-da5d60e122bf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.138571] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Waiting for the task: (returnval){ [ 1719.138571] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b764e2-774c-7336-9f61-b8a67d70f721" [ 1719.138571] env[62684]: _type = "Task" [ 1719.138571] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.154557] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b764e2-774c-7336-9f61-b8a67d70f721, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.171928] env[62684]: DEBUG nova.network.neutron [req-52f6dfd9-0d76-45ec-94c1-6b8484eda350 req-ca9ed196-3f12-4600-864d-e047dc70ed68 service nova] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Updated VIF entry in instance network info cache for port 0e21d0c2-c9f1-4885-ba7f-b8e64973c91f. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1719.173148] env[62684]: DEBUG nova.network.neutron [req-52f6dfd9-0d76-45ec-94c1-6b8484eda350 req-ca9ed196-3f12-4600-864d-e047dc70ed68 service nova] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Updating instance_info_cache with network_info: [{"id": "0e21d0c2-c9f1-4885-ba7f-b8e64973c91f", "address": "fa:16:3e:49:a8:66", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e21d0c2-c9", "ovs_interfaceid": "0e21d0c2-c9f1-4885-ba7f-b8e64973c91f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1719.309926] env[62684]: DEBUG oslo_concurrency.lockutils [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Releasing lock "refresh_cache-c1580c72-9345-436e-b4f7-56d319248864" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1719.310130] env[62684]: DEBUG nova.compute.manager [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Instance network_info: |[{"id": "c089de91-ca6e-40fe-8783-5b1644292445", "address": "fa:16:3e:de:7f:6c", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.66", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc089de91-ca", "ovs_interfaceid": "c089de91-ca6e-40fe-8783-5b1644292445", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1719.310548] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-033f3890-1fa7-43f0-a377-30046bc89af7 
tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:7f:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ded18042-834c-4792-b3e8-b1c377446432', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c089de91-ca6e-40fe-8783-5b1644292445', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1719.325760] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Creating folder: Project (031ad4f1e3014ebfa6eccf32481585f2). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1719.328736] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0ffb963e-2298-4da5-9dae-9e50bef36ef3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.342318] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Created folder: Project (031ad4f1e3014ebfa6eccf32481585f2) in parent group-v421118. [ 1719.342620] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Creating folder: Instances. Parent ref: group-v421129. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1719.343075] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a5a3445f-ac58-4c98-bac0-54a1cd4cd32e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.354714] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Created folder: Instances in parent group-v421129. [ 1719.354927] env[62684]: DEBUG oslo.service.loopingcall [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1719.355597] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1580c72-9345-436e-b4f7-56d319248864] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1719.355597] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b7dcfe16-3f48-430f-b400-d74a641073a3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.379193] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1719.379193] env[62684]: value = "task-2052282" [ 1719.379193] env[62684]: _type = "Task" [ 1719.379193] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.391431] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052282, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.421881] env[62684]: DEBUG nova.compute.manager [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1719.605304] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d405cc8-60e6-4f54-ada7-0e73168f1533 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.614400] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-857476fb-dda8-4beb-8ac2-f3a5e9a0d21b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.657373] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e5fe8b6-570e-4f75-aa7f-9295ea6bc2f9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.667229] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b764e2-774c-7336-9f61-b8a67d70f721, 'name': SearchDatastore_Task, 'duration_secs': 0.010398} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.671249] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1719.671803] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] e4528a29-163d-4f5e-9497-6e6b90b290ba/e4528a29-163d-4f5e-9497-6e6b90b290ba.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1719.672475] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1719.672720] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1719.672963] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d8ba1023-498e-4493-81af-be315493fe00 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.677158] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52476ef1-5b07-44ad-ba4a-8bf8f98c0412 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.682144] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7235d97a-29dd-440c-bc75-12d996c7963d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.684677] env[62684]: DEBUG oslo_concurrency.lockutils [req-52f6dfd9-0d76-45ec-94c1-6b8484eda350 req-ca9ed196-3f12-4600-864d-e047dc70ed68 service nova] Releasing lock "refresh_cache-e4528a29-163d-4f5e-9497-6e6b90b290ba" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1719.685383] env[62684]: DEBUG nova.compute.manager [req-52f6dfd9-0d76-45ec-94c1-6b8484eda350 req-ca9ed196-3f12-4600-864d-e047dc70ed68 service nova] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Received event network-vif-plugged-5868c4da-5351-4d35-8886-12ba976894db {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1719.685383] env[62684]: DEBUG oslo_concurrency.lockutils [req-52f6dfd9-0d76-45ec-94c1-6b8484eda350 req-ca9ed196-3f12-4600-864d-e047dc70ed68 service nova] 
Acquiring lock "0f9a525c-09b9-483e-b418-fea6e6e5dc4a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1719.685961] env[62684]: DEBUG oslo_concurrency.lockutils [req-52f6dfd9-0d76-45ec-94c1-6b8484eda350 req-ca9ed196-3f12-4600-864d-e047dc70ed68 service nova] Lock "0f9a525c-09b9-483e-b418-fea6e6e5dc4a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1719.685961] env[62684]: DEBUG oslo_concurrency.lockutils [req-52f6dfd9-0d76-45ec-94c1-6b8484eda350 req-ca9ed196-3f12-4600-864d-e047dc70ed68 service nova] Lock "0f9a525c-09b9-483e-b418-fea6e6e5dc4a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1719.685961] env[62684]: DEBUG nova.compute.manager [req-52f6dfd9-0d76-45ec-94c1-6b8484eda350 req-ca9ed196-3f12-4600-864d-e047dc70ed68 service nova] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] No waiting events found dispatching network-vif-plugged-5868c4da-5351-4d35-8886-12ba976894db {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1719.685961] env[62684]: WARNING nova.compute.manager [req-52f6dfd9-0d76-45ec-94c1-6b8484eda350 req-ca9ed196-3f12-4600-864d-e047dc70ed68 service nova] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Received unexpected event network-vif-plugged-5868c4da-5351-4d35-8886-12ba976894db for instance with vm_state building and task_state spawning. [ 1719.686102] env[62684]: DEBUG nova.compute.manager [req-52f6dfd9-0d76-45ec-94c1-6b8484eda350 req-ca9ed196-3f12-4600-864d-e047dc70ed68 service nova] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Received event network-changed-5868c4da-5351-4d35-8886-12ba976894db {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1719.686231] env[62684]: DEBUG nova.compute.manager [req-52f6dfd9-0d76-45ec-94c1-6b8484eda350 req-ca9ed196-3f12-4600-864d-e047dc70ed68 service nova] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Refreshing instance network info cache due to event network-changed-5868c4da-5351-4d35-8886-12ba976894db. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1719.686381] env[62684]: DEBUG oslo_concurrency.lockutils [req-52f6dfd9-0d76-45ec-94c1-6b8484eda350 req-ca9ed196-3f12-4600-864d-e047dc70ed68 service nova] Acquiring lock "refresh_cache-0f9a525c-09b9-483e-b418-fea6e6e5dc4a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1719.686522] env[62684]: DEBUG oslo_concurrency.lockutils [req-52f6dfd9-0d76-45ec-94c1-6b8484eda350 req-ca9ed196-3f12-4600-864d-e047dc70ed68 service nova] Acquired lock "refresh_cache-0f9a525c-09b9-483e-b418-fea6e6e5dc4a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1719.686677] env[62684]: DEBUG nova.network.neutron [req-52f6dfd9-0d76-45ec-94c1-6b8484eda350 req-ca9ed196-3f12-4600-864d-e047dc70ed68 service nova] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Refreshing network info cache for port 5868c4da-5351-4d35-8886-12ba976894db {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1719.707740] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Waiting for the task: (returnval){ [ 1719.707740] env[62684]: value = "task-2052283" [ 1719.707740] env[62684]: _type = "Task" [ 1719.707740] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.707740] env[62684]: DEBUG nova.compute.provider_tree [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1719.714315] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1719.715329] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1719.718674] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-501ceda4-7302-4cce-a721-707b1217eba0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.722026] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Task: {'id': task-2052283, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.726084] env[62684]: DEBUG oslo_vmware.api [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Waiting for the task: (returnval){ [ 1719.726084] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52719412-4704-129d-d3c5-66bccba77a2c" [ 1719.726084] env[62684]: _type = "Task" [ 1719.726084] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.735387] env[62684]: DEBUG oslo_vmware.api [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52719412-4704-129d-d3c5-66bccba77a2c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.893648] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052282, 'name': CreateVM_Task, 'duration_secs': 0.435579} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.893648] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1580c72-9345-436e-b4f7-56d319248864] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1719.898156] env[62684]: DEBUG oslo_vmware.service [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ebaf8de-6b62-406a-85c1-d3f0e94755b3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.909141] env[62684]: DEBUG oslo_concurrency.lockutils [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1719.909338] env[62684]: DEBUG oslo_concurrency.lockutils [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1719.909796] env[62684]: DEBUG oslo_concurrency.lockutils [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1719.914259] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0da49ef-a219-454a-9e80-ddbcad2aa3de {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.922027] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 
tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Waiting for the task: (returnval){ [ 1719.922027] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5202e9a8-2895-64a6-1c2f-55855147f11b" [ 1719.922027] env[62684]: _type = "Task" [ 1719.922027] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.946436] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5202e9a8-2895-64a6-1c2f-55855147f11b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.211636] env[62684]: DEBUG nova.scheduler.client.report [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1720.228633] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Task: {'id': task-2052283, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.238935] env[62684]: DEBUG oslo_vmware.api [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52719412-4704-129d-d3c5-66bccba77a2c, 'name': SearchDatastore_Task, 'duration_secs': 0.008446} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.239816] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d335faec-3aec-41df-86ff-224065589592 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.254254] env[62684]: DEBUG oslo_vmware.api [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Waiting for the task: (returnval){ [ 1720.254254] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e0eb76-1009-b378-3ee7-b86508487cb5" [ 1720.254254] env[62684]: _type = "Task" [ 1720.254254] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.291728] env[62684]: DEBUG oslo_vmware.api [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e0eb76-1009-b378-3ee7-b86508487cb5, 'name': SearchDatastore_Task, 'duration_secs': 0.019383} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.291991] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1720.292252] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 0f9a525c-09b9-483e-b418-fea6e6e5dc4a/0f9a525c-09b9-483e-b418-fea6e6e5dc4a.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1720.292531] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d02094d6-8266-4ba9-9123-34ef5a845e6a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.310343] env[62684]: DEBUG oslo_vmware.api [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Waiting for the task: (returnval){ [ 1720.310343] env[62684]: value = "task-2052284" [ 1720.310343] env[62684]: _type = "Task" [ 1720.310343] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.329591] env[62684]: DEBUG oslo_vmware.api [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052284, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.416208] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Acquiring lock "e1540aa6-12a4-4cff-a444-d47ee66c78d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1720.416208] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Lock "e1540aa6-12a4-4cff-a444-d47ee66c78d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1720.440330] env[62684]: DEBUG nova.compute.manager [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1720.447753] env[62684]: DEBUG oslo_concurrency.lockutils [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1720.447753] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1720.449093] env[62684]: DEBUG oslo_concurrency.lockutils [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1720.449452] env[62684]: DEBUG oslo_concurrency.lockutils [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1720.449524] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1720.450431] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-cfe33fb5-4602-46e9-b6b1-c00af2ca8f67 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.463672] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1720.464167] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1720.465408] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa462ee-4b7c-4437-a2b4-5e884288995b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.474562] env[62684]: DEBUG nova.virt.hardware [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1720.474562] env[62684]: DEBUG nova.virt.hardware [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1720.474562] env[62684]: DEBUG nova.virt.hardware [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1720.474562] env[62684]: DEBUG nova.virt.hardware [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1720.474824] env[62684]: DEBUG nova.virt.hardware [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1720.474824] env[62684]: DEBUG nova.virt.hardware [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1720.474824] env[62684]: DEBUG nova.virt.hardware [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1720.474824] env[62684]: DEBUG nova.virt.hardware [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1720.474824] env[62684]: DEBUG nova.virt.hardware [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1720.474973] env[62684]: DEBUG nova.virt.hardware [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1720.475709] env[62684]: DEBUG nova.virt.hardware [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1720.476463] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf924b56-7497-41fd-a082-51ddacec33e9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.485833] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-577fa52a-00bb-4b4d-9c21-ca4f0808d90d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.489467] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-019eb0f2-0ef0-434d-bd4b-ce5fa70f13da {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.498703] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Waiting for the task: (returnval){ [ 1720.498703] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5293c2ad-1d6d-6e1c-9d67-086c63f5e849" [ 1720.498703] env[62684]: _type = "Task" [ 1720.498703] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.520820] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Preparing fetch location {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1720.521916] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Creating directory with path [datastore2] vmware_temp/7e455fac-4f5d-4450-b766-411371a45a22/3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1720.521916] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fa61781d-a7b8-4c49-afeb-d4faec043256 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.555892] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Created directory with path [datastore2] vmware_temp/7e455fac-4f5d-4450-b766-411371a45a22/3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1720.555892] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Fetch image to [datastore2] vmware_temp/7e455fac-4f5d-4450-b766-411371a45a22/3931321c-cb4c-4b87-8d3a-50e05ea01db2/tmp-sparse.vmdk {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1720.555971] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Downloading image file data 3931321c-cb4c-4b87-8d3a-50e05ea01db2 to [datastore2] vmware_temp/7e455fac-4f5d-4450-b766-411371a45a22/3931321c-cb4c-4b87-8d3a-50e05ea01db2/tmp-sparse.vmdk on the data store datastore2 {{(pid=62684) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1720.556828] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ddd480-5c96-4d2c-bc77-e099aa73267a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.566388] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2f25ba3-bd5a-4b66-bbaf-59905ae8258f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.577104] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88a7120a-3088-4f51-a27c-6e4e1238e064 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.614529] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab5d4cfc-b70b-49b3-b2ce-5e533ccb83ef {{(pid=62684) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.621354] env[62684]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1100e907-99b2-4a3d-b7ff-acdb3366a216 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.660224] env[62684]: DEBUG nova.compute.manager [req-04571415-74d5-4fcb-ae2f-15861a3f5bf3 req-d3d15fec-37ba-4fd1-a0cd-aebedd88bdca service nova] [instance: c1580c72-9345-436e-b4f7-56d319248864] Received event network-vif-plugged-c089de91-ca6e-40fe-8783-5b1644292445 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1720.660224] env[62684]: DEBUG oslo_concurrency.lockutils [req-04571415-74d5-4fcb-ae2f-15861a3f5bf3 req-d3d15fec-37ba-4fd1-a0cd-aebedd88bdca service nova] Acquiring lock "c1580c72-9345-436e-b4f7-56d319248864-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1720.660224] env[62684]: DEBUG oslo_concurrency.lockutils [req-04571415-74d5-4fcb-ae2f-15861a3f5bf3 req-d3d15fec-37ba-4fd1-a0cd-aebedd88bdca service nova] Lock "c1580c72-9345-436e-b4f7-56d319248864-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1720.660224] env[62684]: DEBUG oslo_concurrency.lockutils [req-04571415-74d5-4fcb-ae2f-15861a3f5bf3 req-d3d15fec-37ba-4fd1-a0cd-aebedd88bdca service nova] Lock "c1580c72-9345-436e-b4f7-56d319248864-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.660375] env[62684]: DEBUG nova.compute.manager [req-04571415-74d5-4fcb-ae2f-15861a3f5bf3 req-d3d15fec-37ba-4fd1-a0cd-aebedd88bdca service nova] [instance: c1580c72-9345-436e-b4f7-56d319248864] No waiting events found dispatching network-vif-plugged-c089de91-ca6e-40fe-8783-5b1644292445 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1720.660509] env[62684]: WARNING nova.compute.manager [req-04571415-74d5-4fcb-ae2f-15861a3f5bf3 req-d3d15fec-37ba-4fd1-a0cd-aebedd88bdca service nova] [instance: c1580c72-9345-436e-b4f7-56d319248864] Received unexpected event network-vif-plugged-c089de91-ca6e-40fe-8783-5b1644292445 for instance with vm_state building and task_state spawning. [ 1720.660673] env[62684]: DEBUG nova.compute.manager [req-04571415-74d5-4fcb-ae2f-15861a3f5bf3 req-d3d15fec-37ba-4fd1-a0cd-aebedd88bdca service nova] [instance: c1580c72-9345-436e-b4f7-56d319248864] Received event network-changed-c089de91-ca6e-40fe-8783-5b1644292445 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1720.660827] env[62684]: DEBUG nova.compute.manager [req-04571415-74d5-4fcb-ae2f-15861a3f5bf3 req-d3d15fec-37ba-4fd1-a0cd-aebedd88bdca service nova] [instance: c1580c72-9345-436e-b4f7-56d319248864] Refreshing instance network info cache due to event network-changed-c089de91-ca6e-40fe-8783-5b1644292445. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1720.661018] env[62684]: DEBUG oslo_concurrency.lockutils [req-04571415-74d5-4fcb-ae2f-15861a3f5bf3 req-d3d15fec-37ba-4fd1-a0cd-aebedd88bdca service nova] Acquiring lock "refresh_cache-c1580c72-9345-436e-b4f7-56d319248864" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1720.661153] env[62684]: DEBUG oslo_concurrency.lockutils [req-04571415-74d5-4fcb-ae2f-15861a3f5bf3 req-d3d15fec-37ba-4fd1-a0cd-aebedd88bdca service nova] Acquired lock "refresh_cache-c1580c72-9345-436e-b4f7-56d319248864" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1720.662119] env[62684]: DEBUG nova.network.neutron [req-04571415-74d5-4fcb-ae2f-15861a3f5bf3 req-d3d15fec-37ba-4fd1-a0cd-aebedd88bdca service nova] [instance: c1580c72-9345-436e-b4f7-56d319248864] Refreshing network info cache for port c089de91-ca6e-40fe-8783-5b1644292445 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1720.665950] env[62684]: DEBUG nova.network.neutron [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Successfully created port: fafc337e-8380-4431-acaa-5ab65e6b32d7 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1720.721741] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Downloading image file data 3931321c-cb4c-4b87-8d3a-50e05ea01db2 to the data store datastore2 {{(pid=62684) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1720.729179] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.328s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.732774] env[62684]: DEBUG nova.compute.manager [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1720.743855] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.200s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1720.749116] env[62684]: INFO nova.compute.claims [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1720.752908] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Task: {'id': task-2052283, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.65029} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.760721] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] e4528a29-163d-4f5e-9497-6e6b90b290ba/e4528a29-163d-4f5e-9497-6e6b90b290ba.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1720.765022] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1720.765022] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2c0784b8-256a-4215-8db7-ad1f4f8eb842 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.776949] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Waiting for the task: (returnval){ [ 1720.776949] env[62684]: value = "task-2052286" [ 1720.776949] env[62684]: _type = "Task" [ 1720.776949] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.788065] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Task: {'id': task-2052286, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.820328] env[62684]: DEBUG oslo_vmware.rw_handles [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7e455fac-4f5d-4450-b766-411371a45a22/3931321c-cb4c-4b87-8d3a-50e05ea01db2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62684) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1720.895650] env[62684]: DEBUG oslo_vmware.api [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052284, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490186} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.897392] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 0f9a525c-09b9-483e-b418-fea6e6e5dc4a/0f9a525c-09b9-483e-b418-fea6e6e5dc4a.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1720.897664] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1720.899309] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a52383d2-9667-47e3-b835-6e6b46c3bada {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.910208] env[62684]: DEBUG oslo_vmware.api [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Waiting for the task: (returnval){ [ 1720.910208] env[62684]: value = "task-2052287" [ 1720.910208] env[62684]: _type = "Task" [ 1720.910208] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.919895] env[62684]: DEBUG nova.compute.manager [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1720.922872] env[62684]: DEBUG oslo_vmware.api [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052287, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.260779] env[62684]: DEBUG nova.compute.utils [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1721.269023] env[62684]: DEBUG nova.compute.manager [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1721.269243] env[62684]: DEBUG nova.network.neutron [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1721.276298] env[62684]: DEBUG nova.network.neutron [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Successfully updated port: 53627d03-ce5e-439d-9055-b5abf4b099ed {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1721.293085] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Task: {'id': task-2052286, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06675} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.293683] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1721.295390] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e720d1a8-6d20-4b48-a6ed-96b1acdf3454 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.324778] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] e4528a29-163d-4f5e-9497-6e6b90b290ba/e4528a29-163d-4f5e-9497-6e6b90b290ba.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1721.326184] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7e54021-2948-490f-84ff-99ac8bd9311a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.341617] env[62684]: DEBUG nova.network.neutron [req-52f6dfd9-0d76-45ec-94c1-6b8484eda350 req-ca9ed196-3f12-4600-864d-e047dc70ed68 service nova] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Updated VIF entry in instance network info cache for port 5868c4da-5351-4d35-8886-12ba976894db. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1721.341977] env[62684]: DEBUG nova.network.neutron [req-52f6dfd9-0d76-45ec-94c1-6b8484eda350 req-ca9ed196-3f12-4600-864d-e047dc70ed68 service nova] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Updating instance_info_cache with network_info: [{"id": "5868c4da-5351-4d35-8886-12ba976894db", "address": "fa:16:3e:26:ba:29", "network": {"id": "ad517579-bdcb-4ccc-8e16-74f3524aa5f6", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1045737469-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3a7760a18d54bc4b8b4fd291e127381", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5868c4da-53", "ovs_interfaceid": "5868c4da-5351-4d35-8886-12ba976894db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1721.353233] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Waiting for the task: (returnval){ [ 1721.353233] env[62684]: value = "task-2052288" [ 1721.353233] env[62684]: _type = "Task" [ 1721.353233] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.365989] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Task: {'id': task-2052288, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.436780] env[62684]: DEBUG oslo_vmware.api [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052287, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071845} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.437611] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1721.438274] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e066cc-5be9-4040-bc57-540f53845343 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.461107] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] 0f9a525c-09b9-483e-b418-fea6e6e5dc4a/0f9a525c-09b9-483e-b418-fea6e6e5dc4a.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1721.462396] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1721.465915] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f2b5a2b-4c98-4b4e-8459-488a0f4b05e6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.489834] env[62684]: DEBUG oslo_vmware.api [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Waiting for the task: (returnval){ [ 1721.489834] env[62684]: value = "task-2052289" [ 1721.489834] env[62684]: _type = "Task" [ 1721.489834] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.501581] env[62684]: DEBUG oslo_vmware.api [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052289, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.522361] env[62684]: DEBUG oslo_vmware.rw_handles [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Completed reading data from the image iterator. {{(pid=62684) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1721.522645] env[62684]: DEBUG oslo_vmware.rw_handles [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7e455fac-4f5d-4450-b766-411371a45a22/3931321c-cb4c-4b87-8d3a-50e05ea01db2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62684) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1721.665923] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Downloaded image file data 3931321c-cb4c-4b87-8d3a-50e05ea01db2 to vmware_temp/7e455fac-4f5d-4450-b766-411371a45a22/3931321c-cb4c-4b87-8d3a-50e05ea01db2/tmp-sparse.vmdk on the data store datastore2 {{(pid=62684) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1721.667952] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Caching image {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1721.668251] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Copying Virtual Disk [datastore2] vmware_temp/7e455fac-4f5d-4450-b766-411371a45a22/3931321c-cb4c-4b87-8d3a-50e05ea01db2/tmp-sparse.vmdk to [datastore2] vmware_temp/7e455fac-4f5d-4450-b766-411371a45a22/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1721.668469] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1bb8b815-508c-4160-b725-31dbee0e6f00 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.677716] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Waiting for the task: (returnval){ [ 1721.677716] env[62684]: value = "task-2052290" [ 1721.677716] env[62684]: _type = "Task" [ 1721.677716] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.685860] env[62684]: DEBUG nova.policy [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6cc355a545ee470d8082f0a96dafe513', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '147d85277da2482db0c24803c664cb93', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1721.697175] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052290, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.770097] env[62684]: DEBUG nova.compute.manager [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1721.780340] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Acquiring lock "refresh_cache-4cbcfa1a-c034-4de7-ad25-4ad22316067e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1721.780494] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Acquired lock "refresh_cache-4cbcfa1a-c034-4de7-ad25-4ad22316067e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1721.780634] env[62684]: DEBUG nova.network.neutron [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1721.849607] env[62684]: DEBUG oslo_concurrency.lockutils [req-52f6dfd9-0d76-45ec-94c1-6b8484eda350 req-ca9ed196-3f12-4600-864d-e047dc70ed68 service nova] Releasing lock "refresh_cache-0f9a525c-09b9-483e-b418-fea6e6e5dc4a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1721.866527] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Task: {'id': task-2052288, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.982991] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ab77a1e-7df2-4dc2-9ff8-21d4c389b674 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.998434] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e85eeb0f-d4a0-4733-a478-40a0e9724419 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.011528] env[62684]: DEBUG oslo_vmware.api [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052289, 'name': ReconfigVM_Task, 'duration_secs': 0.453262} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.042630] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Reconfigured VM instance instance-00000002 to attach disk [datastore1] 0f9a525c-09b9-483e-b418-fea6e6e5dc4a/0f9a525c-09b9-483e-b418-fea6e6e5dc4a.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1722.044040] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-274b5ef3-05a2-4736-8465-621bae759d4f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.046950] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82fdf11c-5fb9-4234-b8b0-3e88e55d014b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.057071] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-212de339-e878-49fe-9db0-637f578dfd37 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.065038] env[62684]: DEBUG oslo_vmware.api [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Waiting for the task: (returnval){ [ 1722.065038] env[62684]: value = "task-2052291" [ 1722.065038] env[62684]: _type = "Task" [ 1722.065038] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.077605] env[62684]: DEBUG nova.compute.provider_tree [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1722.084792] env[62684]: DEBUG oslo_vmware.api [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052291, 'name': Rename_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.193019] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052290, 'name': CopyVirtualDisk_Task} progress is 27%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.375134] env[62684]: DEBUG nova.network.neutron [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1722.378521] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Task: {'id': task-2052288, 'name': ReconfigVM_Task, 'duration_secs': 0.79431} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.378905] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Reconfigured VM instance instance-00000001 to attach disk [datastore1] e4528a29-163d-4f5e-9497-6e6b90b290ba/e4528a29-163d-4f5e-9497-6e6b90b290ba.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1722.382144] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-27f05378-b0a6-443d-be5a-29552dad557e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.389054] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Waiting for the task: (returnval){ [ 1722.389054] env[62684]: value = "task-2052292" [ 1722.389054] env[62684]: _type = "Task" [ 1722.389054] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.401092] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Task: {'id': task-2052292, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.575715] env[62684]: DEBUG oslo_vmware.api [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052291, 'name': Rename_Task, 'duration_secs': 0.31883} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.576290] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1722.576697] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c858a16b-0259-4d1f-a2a1-3292b3e7ecbc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.586415] env[62684]: DEBUG nova.scheduler.client.report [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1722.589923] env[62684]: DEBUG oslo_vmware.api [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Waiting for the task: (returnval){ [ 1722.589923] env[62684]: value = "task-2052294" [ 1722.589923] env[62684]: _type = "Task" [ 1722.589923] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.606584] env[62684]: DEBUG oslo_vmware.api [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052294, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.693901] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052290, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.785210] env[62684]: DEBUG nova.compute.manager [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1722.803776] env[62684]: DEBUG nova.network.neutron [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Updating instance_info_cache with network_info: [{"id": "53627d03-ce5e-439d-9055-b5abf4b099ed", "address": "fa:16:3e:60:9e:73", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.178", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53627d03-ce", "ovs_interfaceid": "53627d03-ce5e-439d-9055-b5abf4b099ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1722.810188] env[62684]: DEBUG nova.virt.hardware [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1722.810188] env[62684]: DEBUG nova.virt.hardware [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1722.810188] env[62684]: DEBUG nova.virt.hardware [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1722.810188] env[62684]: DEBUG nova.virt.hardware [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1722.810381] env[62684]: DEBUG nova.virt.hardware [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1722.810381] env[62684]: DEBUG nova.virt.hardware [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1722.810381] env[62684]: DEBUG nova.virt.hardware [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1722.810381] env[62684]: DEBUG nova.virt.hardware [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1722.810381] env[62684]: DEBUG nova.virt.hardware [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1722.810573] env[62684]: DEBUG nova.virt.hardware [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1722.810573] env[62684]: DEBUG nova.virt.hardware [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1722.811294] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6473ac55-518d-41fe-b79c-358f3cf4cdcf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.822501] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f47073-373b-40e3-9936-94add10312f0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.899767] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Task: {'id': task-2052292, 'name': Rename_Task, 'duration_secs': 0.146789} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.900046] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1722.900294] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-60794f0c-b2c5-41cf-9b32-f59bfceff3d9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.907726] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Waiting for the task: (returnval){ [ 1722.907726] env[62684]: value = "task-2052295" [ 1722.907726] env[62684]: _type = "Task" [ 1722.907726] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.916759] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Task: {'id': task-2052295, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.065460] env[62684]: DEBUG nova.network.neutron [req-04571415-74d5-4fcb-ae2f-15861a3f5bf3 req-d3d15fec-37ba-4fd1-a0cd-aebedd88bdca service nova] [instance: c1580c72-9345-436e-b4f7-56d319248864] Updated VIF entry in instance network info cache for port c089de91-ca6e-40fe-8783-5b1644292445. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1723.065973] env[62684]: DEBUG nova.network.neutron [req-04571415-74d5-4fcb-ae2f-15861a3f5bf3 req-d3d15fec-37ba-4fd1-a0cd-aebedd88bdca service nova] [instance: c1580c72-9345-436e-b4f7-56d319248864] Updating instance_info_cache with network_info: [{"id": "c089de91-ca6e-40fe-8783-5b1644292445", "address": "fa:16:3e:de:7f:6c", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.66", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc089de91-ca", "ovs_interfaceid": "c089de91-ca6e-40fe-8783-5b1644292445", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1723.100964] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.358s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.101586] env[62684]: DEBUG nova.compute.manager [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1723.104640] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.032s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.108633] env[62684]: INFO nova.compute.claims [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1723.117321] env[62684]: DEBUG oslo_vmware.api [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052294, 'name': PowerOnVM_Task, 'duration_secs': 0.475058} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.117628] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1723.118839] env[62684]: INFO nova.compute.manager [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Took 11.57 seconds to spawn the instance on the hypervisor. [ 1723.119649] env[62684]: DEBUG nova.compute.manager [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1723.120666] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfee12e5-d465-4518-b5ba-a90b4cbb61a9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.199142] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052290, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.023954} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.199142] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Copied Virtual Disk [datastore2] vmware_temp/7e455fac-4f5d-4450-b766-411371a45a22/3931321c-cb4c-4b87-8d3a-50e05ea01db2/tmp-sparse.vmdk to [datastore2] vmware_temp/7e455fac-4f5d-4450-b766-411371a45a22/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1723.199142] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Deleting the datastore file [datastore2] vmware_temp/7e455fac-4f5d-4450-b766-411371a45a22/3931321c-cb4c-4b87-8d3a-50e05ea01db2/tmp-sparse.vmdk {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1723.199142] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-82d8958d-39bb-4cdb-bc1c-8481944e4076 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.206246] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Waiting for the task: (returnval){ [ 1723.206246] env[62684]: value = "task-2052296" [ 1723.206246] env[62684]: _type = "Task" [ 1723.206246] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.216235] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052296, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.315730] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Releasing lock "refresh_cache-4cbcfa1a-c034-4de7-ad25-4ad22316067e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1723.316103] env[62684]: DEBUG nova.compute.manager [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Instance network_info: |[{"id": "53627d03-ce5e-439d-9055-b5abf4b099ed", "address": "fa:16:3e:60:9e:73", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.178", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53627d03-ce", "ovs_interfaceid": "53627d03-ce5e-439d-9055-b5abf4b099ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1723.316524] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:9e:73', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ded18042-834c-4792-b3e8-b1c377446432', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '53627d03-ce5e-439d-9055-b5abf4b099ed', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1723.324192] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Creating folder: Project (bcd3c908bccd421292836d1cde1fc5e3). Parent ref: group-v421118. 
{{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1723.324550] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cc0b004a-bb57-4641-9490-770c90079bbe {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.336461] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Created folder: Project (bcd3c908bccd421292836d1cde1fc5e3) in parent group-v421118. [ 1723.336566] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Creating folder: Instances. Parent ref: group-v421132. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1723.336752] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-62991c09-211a-48f8-b8f2-5f86559ff003 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.348071] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Created folder: Instances in parent group-v421132. [ 1723.348992] env[62684]: DEBUG oslo.service.loopingcall [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1723.348992] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1723.348992] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f25b7056-3498-4c10-a05b-ba367c8cc1b4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.371709] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1723.371709] env[62684]: value = "task-2052299" [ 1723.371709] env[62684]: _type = "Task" [ 1723.371709] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.378877] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052299, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.421812] env[62684]: DEBUG oslo_vmware.api [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Task: {'id': task-2052295, 'name': PowerOnVM_Task, 'duration_secs': 0.478087} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.422084] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1723.423592] env[62684]: INFO nova.compute.manager [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Took 14.21 seconds to spawn the instance on the hypervisor. [ 1723.425423] env[62684]: DEBUG nova.compute.manager [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1723.425423] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98cde6d0-2c45-4fd3-9683-0a549fd1f32e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.536133] env[62684]: DEBUG nova.network.neutron [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Successfully updated port: fafc337e-8380-4431-acaa-5ab65e6b32d7 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1723.570471] env[62684]: DEBUG oslo_concurrency.lockutils [req-04571415-74d5-4fcb-ae2f-15861a3f5bf3 req-d3d15fec-37ba-4fd1-a0cd-aebedd88bdca service nova] Releasing lock "refresh_cache-c1580c72-9345-436e-b4f7-56d319248864" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1723.615122] env[62684]: DEBUG nova.compute.utils [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1723.619085] env[62684]: DEBUG nova.compute.manager [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1723.619085] env[62684]: DEBUG nova.network.neutron [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1723.653291] env[62684]: INFO nova.compute.manager [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Took 17.93 seconds to build instance. 
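The recurring 'Lock "..." acquired by "..." :: waited N.NNNs' and 'Lock "..." "released" by "..." :: held N.NNNs' DEBUG lines in this trace are emitted by oslo.concurrency's lockutils wrapper (the inner function referenced at lockutils.py:402/407/421). A minimal sketch of that pattern follows; the lock name and the decorated function are illustrative assumptions, not Nova's actual resource-tracker code.

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid):
    # The body runs with the named semaphore held. lockutils' wrapper logs the
    # "acquired ... waited N s" line on entry and the
    # "released ... held N s" line on exit, as seen in the trace above.
    return {'instance': instance_uuid, 'claimed': True}

if __name__ == '__main__':
    # Hypothetical invocation; the UUID is taken from the log only as an example value.
    print(instance_claim('effc673a-103f-413b-88ac-6907ad1ee852'))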
[ 1723.716439] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052296, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.034316} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.717789] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1723.717789] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Moving file from [datastore2] vmware_temp/7e455fac-4f5d-4450-b766-411371a45a22/3931321c-cb4c-4b87-8d3a-50e05ea01db2 to [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2. {{(pid=62684) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 1723.717789] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-8b9629c2-fb3e-484e-ba31-e34aee7d405c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.727114] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Waiting for the task: (returnval){ [ 1723.727114] env[62684]: value = "task-2052300" [ 1723.727114] env[62684]: _type = "Task" [ 1723.727114] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.736445] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052300, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.737954] env[62684]: DEBUG nova.policy [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '162fb2b124ad42ba96126c691bf7c38e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4058aa2437d84bb49740f062876d66a2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1723.887457] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052299, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.949929] env[62684]: INFO nova.compute.manager [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Took 18.99 seconds to build instance. 
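The 'Invoking <object>.<Method>_Task with opID=oslo.vmware-...' requests and the 'Task: {...} progress is N% ... completed successfully' polling lines throughout this trace correspond to oslo.vmware's invoke_api / wait_for_task flow. The sketch below illustrates that flow under stated assumptions: the credentials, datastore paths, and datacenter reference are placeholders, and this is not Nova's vm_util code, only the generic call/poll pattern.

from oslo_vmware import api as vmware_api

def copy_virtual_disk(host, user, password, src, dst, datacenter):
    # Placeholder connection values; nothing here is taken from this log.
    session = vmware_api.VMwareAPISession(
        host, user, password,
        api_retry_count=10,       # retry transient API faults
        task_poll_interval=0.5)   # seconds between the "progress is N%" polls
    disk_mgr = session.vim.service_content.virtualDiskManager
    # invoke_api() issues the SOAP request (the "Invoking ..." DEBUG line)
    # and returns a task reference.
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName=src, sourceDatacenter=datacenter,
        destName=dst, destDatacenter=datacenter)
    # wait_for_task() polls the task until it reaches the success state
    # (the "completed successfully" line) or raises if the task errors.
    return session.wait_for_task(task)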
[ 1724.040628] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "refresh_cache-8d53d8c3-6db8-4ebe-a35f-0f64602fafcb" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1724.040779] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquired lock "refresh_cache-8d53d8c3-6db8-4ebe-a35f-0f64602fafcb" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1724.040920] env[62684]: DEBUG nova.network.neutron [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1724.121668] env[62684]: DEBUG nova.compute.manager [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1724.141914] env[62684]: DEBUG nova.network.neutron [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Successfully created port: 10023d3d-f0cd-49c9-984f-fb3f2af83e3b {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1724.159025] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6f8e8fb8-05ba-4bd0-a9d9-1c6f54793df0 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Lock "0f9a525c-09b9-483e-b418-fea6e6e5dc4a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.443s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.241511] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052300, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.036985} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.241799] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] File moved {{(pid=62684) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 1724.241990] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Cleaning up location [datastore2] vmware_temp/7e455fac-4f5d-4450-b766-411371a45a22 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1724.242192] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Deleting the datastore file [datastore2] vmware_temp/7e455fac-4f5d-4450-b766-411371a45a22 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1724.242527] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4408caac-4fc7-4f2b-9e1b-2b512e37a213 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.253981] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Waiting for the task: (returnval){ [ 1724.253981] env[62684]: value = "task-2052301" [ 1724.253981] env[62684]: _type = "Task" [ 1724.253981] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.264009] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052301, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.306016] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2286d9dd-ae0c-41cb-bc63-000b2c59f5e9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.314315] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab1c5a4-5150-4f90-8e3a-279c104e0d88 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.349096] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ae3206e-57df-4c2b-ac33-d484049b3595 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.357225] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1646375-0e97-40f9-acd7-33e32a7d62a3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.374694] env[62684]: DEBUG nova.compute.provider_tree [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1724.385650] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052299, 'name': CreateVM_Task, 'duration_secs': 0.71015} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.386896] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1724.387432] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1724.387536] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1724.387890] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1724.388572] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e71b79ac-df97-44f5-afe4-4bf09fc560cf {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.393861] env[62684]: DEBUG oslo_vmware.api [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Waiting for the task: (returnval){ [ 1724.393861] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5206bb07-6d9b-f69a-48f3-5e2c8ccc9a1b" [ 1724.393861] env[62684]: _type = "Task" [ 1724.393861] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.403595] env[62684]: DEBUG oslo_vmware.api [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5206bb07-6d9b-f69a-48f3-5e2c8ccc9a1b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.454616] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb1d49df-86cd-46f9-8417-be97e374b418 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Lock "e4528a29-163d-4f5e-9497-6e6b90b290ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.499s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.484531] env[62684]: DEBUG nova.network.neutron [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Successfully created port: 45eb32aa-2917-4675-a758-bf202fb0fc08 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1724.648725] env[62684]: DEBUG nova.network.neutron [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1724.764098] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052301, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070582} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.765963] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1724.765963] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-466e7cb7-8d17-44fe-9555-d307c13aa057 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.777670] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Waiting for the task: (returnval){ [ 1724.777670] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5257a871-1cc0-d356-2d8a-fb706c7d94f9" [ 1724.777670] env[62684]: _type = "Task" [ 1724.777670] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.786346] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5257a871-1cc0-d356-2d8a-fb706c7d94f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.824695] env[62684]: DEBUG nova.compute.manager [req-7f043b12-075b-44fa-a031-67d3363547ea req-2a8699ac-a957-4afb-a4c9-6aeb4c68e4a3 service nova] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Received event network-vif-plugged-53627d03-ce5e-439d-9055-b5abf4b099ed {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1724.825096] env[62684]: DEBUG oslo_concurrency.lockutils [req-7f043b12-075b-44fa-a031-67d3363547ea req-2a8699ac-a957-4afb-a4c9-6aeb4c68e4a3 service nova] Acquiring lock "4cbcfa1a-c034-4de7-ad25-4ad22316067e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1724.825441] env[62684]: DEBUG oslo_concurrency.lockutils [req-7f043b12-075b-44fa-a031-67d3363547ea req-2a8699ac-a957-4afb-a4c9-6aeb4c68e4a3 service nova] Lock "4cbcfa1a-c034-4de7-ad25-4ad22316067e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1724.825738] env[62684]: DEBUG oslo_concurrency.lockutils [req-7f043b12-075b-44fa-a031-67d3363547ea req-2a8699ac-a957-4afb-a4c9-6aeb4c68e4a3 service nova] Lock "4cbcfa1a-c034-4de7-ad25-4ad22316067e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.825834] env[62684]: DEBUG nova.compute.manager [req-7f043b12-075b-44fa-a031-67d3363547ea req-2a8699ac-a957-4afb-a4c9-6aeb4c68e4a3 service nova] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] No waiting events found dispatching network-vif-plugged-53627d03-ce5e-439d-9055-b5abf4b099ed {{(pid=62684) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1724.826168] env[62684]: WARNING nova.compute.manager [req-7f043b12-075b-44fa-a031-67d3363547ea req-2a8699ac-a957-4afb-a4c9-6aeb4c68e4a3 service nova] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Received unexpected event network-vif-plugged-53627d03-ce5e-439d-9055-b5abf4b099ed for instance with vm_state building and task_state spawning. [ 1724.827115] env[62684]: DEBUG nova.compute.manager [req-7f043b12-075b-44fa-a031-67d3363547ea req-2a8699ac-a957-4afb-a4c9-6aeb4c68e4a3 service nova] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Received event network-changed-53627d03-ce5e-439d-9055-b5abf4b099ed {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1724.827336] env[62684]: DEBUG nova.compute.manager [req-7f043b12-075b-44fa-a031-67d3363547ea req-2a8699ac-a957-4afb-a4c9-6aeb4c68e4a3 service nova] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Refreshing instance network info cache due to event network-changed-53627d03-ce5e-439d-9055-b5abf4b099ed. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1724.827538] env[62684]: DEBUG oslo_concurrency.lockutils [req-7f043b12-075b-44fa-a031-67d3363547ea req-2a8699ac-a957-4afb-a4c9-6aeb4c68e4a3 service nova] Acquiring lock "refresh_cache-4cbcfa1a-c034-4de7-ad25-4ad22316067e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1724.827689] env[62684]: DEBUG oslo_concurrency.lockutils [req-7f043b12-075b-44fa-a031-67d3363547ea req-2a8699ac-a957-4afb-a4c9-6aeb4c68e4a3 service nova] Acquired lock "refresh_cache-4cbcfa1a-c034-4de7-ad25-4ad22316067e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1724.828430] env[62684]: DEBUG nova.network.neutron [req-7f043b12-075b-44fa-a031-67d3363547ea req-2a8699ac-a957-4afb-a4c9-6aeb4c68e4a3 service nova] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Refreshing network info cache for port 53627d03-ce5e-439d-9055-b5abf4b099ed {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1724.881188] env[62684]: DEBUG nova.scheduler.client.report [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1724.912411] env[62684]: DEBUG oslo_vmware.api [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5206bb07-6d9b-f69a-48f3-5e2c8ccc9a1b, 'name': SearchDatastore_Task, 'duration_secs': 0.023023} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.915755] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1724.917085] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1724.917085] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1724.979524] env[62684]: DEBUG nova.network.neutron [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Updating instance_info_cache with network_info: [{"id": "fafc337e-8380-4431-acaa-5ab65e6b32d7", "address": "fa:16:3e:f6:3e:99", "network": {"id": "2fa98fa4-ff7c-44e6-add0-693f55fd4b03", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2019954029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7855def9d0aa49abb7003ee504b9ccaf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfafc337e-83", "ovs_interfaceid": "fafc337e-8380-4431-acaa-5ab65e6b32d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1725.137830] env[62684]: DEBUG nova.compute.manager [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1725.168546] env[62684]: DEBUG nova.virt.hardware [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1725.168546] env[62684]: DEBUG nova.virt.hardware [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1725.168681] env[62684]: DEBUG nova.virt.hardware [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1725.168845] env[62684]: DEBUG nova.virt.hardware [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1725.168877] env[62684]: DEBUG nova.virt.hardware [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1725.169020] env[62684]: DEBUG nova.virt.hardware [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1725.169237] env[62684]: DEBUG nova.virt.hardware [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1725.169392] env[62684]: DEBUG nova.virt.hardware [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1725.169585] env[62684]: DEBUG 
nova.virt.hardware [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1725.169765] env[62684]: DEBUG nova.virt.hardware [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1725.169953] env[62684]: DEBUG nova.virt.hardware [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1725.171168] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aebed05-57b5-4139-b0f6-0f4494e3bbf1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.180434] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc0480db-5ac6-41dd-9a73-eb9e029e6855 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.285365] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5257a871-1cc0-d356-2d8a-fb706c7d94f9, 'name': SearchDatastore_Task, 'duration_secs': 0.019018} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.285365] env[62684]: DEBUG oslo_concurrency.lockutils [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1725.285365] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] c1580c72-9345-436e-b4f7-56d319248864/c1580c72-9345-436e-b4f7-56d319248864.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1725.285365] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1725.285692] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1725.285692] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-10392fdd-e44d-4984-a05a-f1c801ae0187 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.286753] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-971d9b7f-2d88-4aaf-83c8-baa2e92333aa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.294467] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Waiting for the task: (returnval){ [ 1725.294467] env[62684]: value = "task-2052303" [ 1725.294467] env[62684]: _type = "Task" [ 1725.294467] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.300777] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1725.300777] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1725.302025] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-beceaa66-c958-4d13-9fa3-d749f989dd52 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.308030] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052303, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.310664] env[62684]: DEBUG oslo_vmware.api [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Waiting for the task: (returnval){ [ 1725.310664] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523f0f5b-4097-15f6-bb34-679cdf1a7354" [ 1725.310664] env[62684]: _type = "Task" [ 1725.310664] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.321958] env[62684]: DEBUG oslo_vmware.api [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523f0f5b-4097-15f6-bb34-679cdf1a7354, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.389128] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.284s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1725.389779] env[62684]: DEBUG nova.compute.manager [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1725.392867] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 12.072s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1725.392867] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1725.396036] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1725.396036] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.676s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1725.396920] env[62684]: INFO nova.compute.claims [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1725.401061] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-983356e8-c5d7-4a6d-8813-e77808ab0b3b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.409944] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba97eb89-0d72-48ca-a4e2-35be8ece5e32 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.427149] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dad47750-e16c-4f89-8737-46ddcb0ba949 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.436373] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e8748d-e65a-48bf-9907-84237c518949 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.471867] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181205MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1725.472169] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.484388] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Releasing lock "refresh_cache-8d53d8c3-6db8-4ebe-a35f-0f64602fafcb" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1725.484844] env[62684]: DEBUG nova.compute.manager [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Instance network_info: |[{"id": "fafc337e-8380-4431-acaa-5ab65e6b32d7", "address": "fa:16:3e:f6:3e:99", "network": {"id": "2fa98fa4-ff7c-44e6-add0-693f55fd4b03", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2019954029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7855def9d0aa49abb7003ee504b9ccaf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfafc337e-83", "ovs_interfaceid": "fafc337e-8380-4431-acaa-5ab65e6b32d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1725.485775] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f6:3e:99', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fafc337e-8380-4431-acaa-5ab65e6b32d7', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1725.493945] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Creating folder: Project (7855def9d0aa49abb7003ee504b9ccaf). Parent ref: group-v421118. 
{{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1725.494680] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ad7181be-bc2f-474b-84d5-fe29fa35c9f7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.506713] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Created folder: Project (7855def9d0aa49abb7003ee504b9ccaf) in parent group-v421118. [ 1725.506713] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Creating folder: Instances. Parent ref: group-v421135. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1725.506957] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4955642b-820d-4900-9b1b-851033d26898 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.517840] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Created folder: Instances in parent group-v421135. [ 1725.517840] env[62684]: DEBUG oslo.service.loopingcall [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1725.517840] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1725.517840] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-921ffb55-9780-4f62-8cc6-6d95033a2143 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.539373] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1725.539373] env[62684]: value = "task-2052306" [ 1725.539373] env[62684]: _type = "Task" [ 1725.539373] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.549919] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052306, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.809829] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052303, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.826989] env[62684]: DEBUG oslo_vmware.api [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523f0f5b-4097-15f6-bb34-679cdf1a7354, 'name': SearchDatastore_Task, 'duration_secs': 0.011163} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.828644] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97914c80-87f4-44d3-b0e3-c7f514f93e5d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.836233] env[62684]: DEBUG oslo_vmware.api [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Waiting for the task: (returnval){ [ 1725.836233] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521fdcb2-970b-e0d1-38dd-521eac5eabd5" [ 1725.836233] env[62684]: _type = "Task" [ 1725.836233] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.850274] env[62684]: DEBUG oslo_vmware.api [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521fdcb2-970b-e0d1-38dd-521eac5eabd5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.902910] env[62684]: DEBUG nova.compute.utils [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1725.906433] env[62684]: DEBUG nova.compute.manager [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1725.906433] env[62684]: DEBUG nova.network.neutron [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1725.971028] env[62684]: DEBUG nova.policy [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '04b9379883ea40959090a52ce58805a8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9e2e0ad7001b4b59805c1d6a3a0caf35', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1726.058818] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052306, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.108641] env[62684]: DEBUG nova.network.neutron [req-7f043b12-075b-44fa-a031-67d3363547ea req-2a8699ac-a957-4afb-a4c9-6aeb4c68e4a3 service nova] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Updated VIF entry in instance network info cache for port 53627d03-ce5e-439d-9055-b5abf4b099ed. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1726.109280] env[62684]: DEBUG nova.network.neutron [req-7f043b12-075b-44fa-a031-67d3363547ea req-2a8699ac-a957-4afb-a4c9-6aeb4c68e4a3 service nova] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Updating instance_info_cache with network_info: [{"id": "53627d03-ce5e-439d-9055-b5abf4b099ed", "address": "fa:16:3e:60:9e:73", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.178", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53627d03-ce", "ovs_interfaceid": "53627d03-ce5e-439d-9055-b5abf4b099ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1726.306640] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052303, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.674137} 
completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.306929] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] c1580c72-9345-436e-b4f7-56d319248864/c1580c72-9345-436e-b4f7-56d319248864.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1726.307154] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1726.307399] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b06ae35e-4b89-43b7-b40a-432038bb2370 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.315477] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Waiting for the task: (returnval){ [ 1726.315477] env[62684]: value = "task-2052307" [ 1726.315477] env[62684]: _type = "Task" [ 1726.315477] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.324979] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052307, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.350576] env[62684]: DEBUG oslo_vmware.api [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521fdcb2-970b-e0d1-38dd-521eac5eabd5, 'name': SearchDatastore_Task, 'duration_secs': 0.056725} completed successfully. 
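
The CopyVirtualDisk_Task / ExtendVirtualDisk_Task entries show the image-backed root-disk flow: the cached VMDK under devstack-image-cache_base/<image-id>/ is copied to <instance-uuid>/<instance-uuid>.vmdk on the same datastore, then extended to 1048576 KiB, which is consistent with the flavor's 1 GiB root disk. The helper below only builds the "[datastore] dir/file.vmdk" path strings seen in these entries; it is illustrative, not the driver's own code.

    def ds_path(datastore, *parts):
        """Render a datastore path such as '[datastore2] dir/file.vmdk'."""
        return "[%s] %s" % (datastore, "/".join(parts))

    def image_cache_vmdk(datastore, image_id,
                         cache_dir="devstack-image-cache_base"):
        # [datastore2] devstack-image-cache_base/<image>/<image>.vmdk
        return ds_path(datastore, cache_dir, image_id, image_id + ".vmdk")

    def instance_root_vmdk(datastore, instance_uuid):
        # [datastore2] <uuid>/<uuid>.vmdk
        return ds_path(datastore, instance_uuid, instance_uuid + ".vmdk")

    src = image_cache_vmdk("datastore2", "3931321c-cb4c-4b87-8d3a-50e05ea01db2")
    dst = instance_root_vmdk("datastore2", "c1580c72-9345-436e-b4f7-56d319248864")
    print("copy", src, "->", dst)
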
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.350956] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1726.351303] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 4cbcfa1a-c034-4de7-ad25-4ad22316067e/4cbcfa1a-c034-4de7-ad25-4ad22316067e.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1726.351619] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-baa9d29e-af4f-442c-8a65-404f2ac21c4a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.359053] env[62684]: DEBUG oslo_vmware.api [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Waiting for the task: (returnval){ [ 1726.359053] env[62684]: value = "task-2052308" [ 1726.359053] env[62684]: _type = "Task" [ 1726.359053] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.371237] env[62684]: DEBUG oslo_vmware.api [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052308, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.407905] env[62684]: DEBUG nova.compute.manager [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1726.556923] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052306, 'name': CreateVM_Task, 'duration_secs': 0.614061} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.560707] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1726.560707] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1726.560707] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1726.560977] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1726.561243] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a255348-84a7-4868-a5af-d1c8ca76a7f0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.573661] env[62684]: DEBUG oslo_vmware.api [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1726.573661] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e573bd-6a70-5bf7-9c8c-694525d7dd36" [ 1726.573661] env[62684]: _type = "Task" [ 1726.573661] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.580126] env[62684]: DEBUG nova.network.neutron [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Successfully created port: b9e25052-ec41-470d-b549-89e542cb4366 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1726.583414] env[62684]: DEBUG nova.network.neutron [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Successfully updated port: 10023d3d-f0cd-49c9-984f-fb3f2af83e3b {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1726.595494] env[62684]: DEBUG oslo_vmware.api [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e573bd-6a70-5bf7-9c8c-694525d7dd36, 'name': SearchDatastore_Task} progress is 0%. 
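
The image-cache entries above bracket the datastore lookup with oslo.concurrency locks: the lock name is the cache path itself ("Acquiring lock" / "Acquired lock", with a matching "Releasing lock" once the SearchDatastore_Task finishes), so only one worker touches a given cached image at a time. A minimal sketch of that pattern with lockutils follows; the lock name mirrors the log and the body is a placeholder.

    from oslo_concurrency import lockutils

    CACHE_LOCK = ("[datastore2] devstack-image-cache_base/"
                  "3931321c-cb4c-4b87-8d3a-50e05ea01db2")

    def ensure_cached_image():
        # lockutils.lock() is a context manager; the name is what appears in
        # the "Acquiring lock ..." / "Releasing lock ..." DEBUG lines.
        with lockutils.lock(CACHE_LOCK):
            # Placeholder for the fetch-if-missing work done under the lock.
            pass

    ensure_cached_image()
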
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.612135] env[62684]: DEBUG oslo_concurrency.lockutils [req-7f043b12-075b-44fa-a031-67d3363547ea req-2a8699ac-a957-4afb-a4c9-6aeb4c68e4a3 service nova] Releasing lock "refresh_cache-4cbcfa1a-c034-4de7-ad25-4ad22316067e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1726.707228] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b18a7c0b-0e09-43a2-81db-d6155490db6a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.724374] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ecfc7c-90fe-492e-802c-ff741ba630fb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.766912] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a44a3e-bb50-4e1f-abed-706390c53fb4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.781145] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-708db2f0-315a-4e75-a68c-a0a643f716c4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.799060] env[62684]: DEBUG nova.compute.provider_tree [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1726.830096] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052307, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066928} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.830423] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1726.831435] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b25f54-7254-45fe-8793-1ce95302fc0a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.857516] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Reconfiguring VM instance instance-00000003 to attach disk [datastore2] c1580c72-9345-436e-b4f7-56d319248864/c1580c72-9345-436e-b4f7-56d319248864.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1726.857906] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76e8f520-4efc-4b72-901f-af8e1ee69c62 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.886887] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Acquiring lock "d532b5fa-90a3-4f25-8684-4eabaa432c86" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1726.887574] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Lock "d532b5fa-90a3-4f25-8684-4eabaa432c86" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.888885] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Waiting for the task: (returnval){ [ 1726.888885] env[62684]: value = "task-2052309" [ 1726.888885] env[62684]: _type = "Task" [ 1726.888885] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.893015] env[62684]: DEBUG oslo_vmware.api [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052308, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.903978] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052309, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.911780] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Acquiring lock "6b1f0e69-3915-40dc-b4ec-93ab174f12b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1726.911780] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Lock "6b1f0e69-3915-40dc-b4ec-93ab174f12b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.920877] env[62684]: DEBUG nova.network.neutron [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Successfully updated port: 45eb32aa-2917-4675-a758-bf202fb0fc08 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1727.085921] env[62684]: DEBUG oslo_vmware.api [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e573bd-6a70-5bf7-9c8c-694525d7dd36, 'name': SearchDatastore_Task, 'duration_secs': 0.027055} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.086316] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1727.087027] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1727.087027] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1727.087027] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1727.087283] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1727.087811] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bf2b174e-1524-472e-bb5e-a1b174831318 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.090783] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "refresh_cache-0676806b-c1f0-4c1a-a12d-add2edf1588f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1727.090942] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquired lock "refresh_cache-0676806b-c1f0-4c1a-a12d-add2edf1588f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1727.091126] env[62684]: DEBUG nova.network.neutron [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1727.099771] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 
tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1727.099892] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1727.100935] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09b9e012-c9cd-4a35-a4cc-41fa6b3d9c48 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.108280] env[62684]: DEBUG oslo_vmware.api [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1727.108280] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526b693f-6a03-a148-688b-529065869451" [ 1727.108280] env[62684]: _type = "Task" [ 1727.108280] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.121646] env[62684]: DEBUG oslo_vmware.api [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526b693f-6a03-a148-688b-529065869451, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.304861] env[62684]: DEBUG nova.scheduler.client.report [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1727.390562] env[62684]: DEBUG oslo_vmware.api [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052308, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.687504} completed successfully. 
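
The report-client entry above lists the provider's inventory per resource class (total, reserved, allocation_ratio, and per-request limits). A common way to read such a record is to compute the capacity the scheduler can actually hand out, (total - reserved) * allocation_ratio; the arithmetic below uses the values from the log and is illustrative, not the placement service's code.

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    def effective_capacity(inv):
        # Capacity available for new allocations, per resource class.
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(effective_capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
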
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.390562] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 4cbcfa1a-c034-4de7-ad25-4ad22316067e/4cbcfa1a-c034-4de7-ad25-4ad22316067e.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1727.390562] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1727.390562] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cd346261-2996-4d26-9b07-1cadf10a7465 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.396798] env[62684]: DEBUG nova.compute.manager [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1727.401601] env[62684]: DEBUG oslo_vmware.api [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Waiting for the task: (returnval){ [ 1727.401601] env[62684]: value = "task-2052310" [ 1727.401601] env[62684]: _type = "Task" [ 1727.401601] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.409571] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052309, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.416624] env[62684]: DEBUG oslo_vmware.api [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052310, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.417613] env[62684]: DEBUG nova.compute.manager [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1727.425706] env[62684]: DEBUG nova.compute.manager [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1727.431904] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Acquiring lock "refresh_cache-91869c00-edd0-40a8-84df-d8842d750558" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1727.431904] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Acquired lock "refresh_cache-91869c00-edd0-40a8-84df-d8842d750558" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1727.431904] env[62684]: DEBUG nova.network.neutron [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1727.468803] env[62684]: DEBUG nova.virt.hardware [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1727.469170] env[62684]: DEBUG nova.virt.hardware [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1727.469433] env[62684]: DEBUG nova.virt.hardware [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1727.469596] env[62684]: DEBUG nova.virt.hardware [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Flavor pref 0:0:0 {{(pid=62684) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1727.469796] env[62684]: DEBUG nova.virt.hardware [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1727.469961] env[62684]: DEBUG nova.virt.hardware [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1727.470214] env[62684]: DEBUG nova.virt.hardware [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1727.470424] env[62684]: DEBUG nova.virt.hardware [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1727.470638] env[62684]: DEBUG nova.virt.hardware [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1727.470823] env[62684]: DEBUG nova.virt.hardware [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1727.471224] env[62684]: DEBUG nova.virt.hardware [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1727.472318] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e3a6669-0e85-42d0-addd-2afd8dad7e9d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.485771] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db41cf11-44ed-4798-bf00-bd2c6414188a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.626083] env[62684]: DEBUG oslo_vmware.api [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526b693f-6a03-a148-688b-529065869451, 'name': SearchDatastore_Task, 'duration_secs': 0.01377} completed successfully. 
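
The hardware.py entries walk through CPU-topology selection for the m1.nano flavor: with no limits set on the flavor or image, sockets/cores/threads default to a 65536 maximum, and for a single vCPU the only valid topology is 1 socket x 1 core x 1 thread, which is what the log reports. The enumeration below is a simplified illustration of that search over (sockets, cores, threads) triples, not Nova's exact algorithm.

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Yield (sockets, cores, threads) triples whose product equals vcpus."""
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    yield sockets, cores, threads

    print(list(possible_topologies(1)))   # [(1, 1, 1)], as in the log
    print(list(possible_topologies(4)))   # (1, 1, 4), (1, 2, 2), (2, 2, 1), ...
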
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.626899] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba89f68f-a732-44ea-8731-2100284a3c79 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.635134] env[62684]: DEBUG oslo_vmware.api [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1727.635134] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d8d1db-bad3-238f-d441-68e104ca3f5e" [ 1727.635134] env[62684]: _type = "Task" [ 1727.635134] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.644438] env[62684]: DEBUG oslo_vmware.api [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d8d1db-bad3-238f-d441-68e104ca3f5e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.645679] env[62684]: DEBUG nova.network.neutron [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1727.813610] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.418s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1727.814741] env[62684]: DEBUG nova.compute.manager [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1727.819932] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.357s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1727.821640] env[62684]: INFO nova.compute.claims [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1727.905267] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052309, 'name': ReconfigVM_Task, 'duration_secs': 0.840739} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.909410] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Reconfigured VM instance instance-00000003 to attach disk [datastore2] c1580c72-9345-436e-b4f7-56d319248864/c1580c72-9345-436e-b4f7-56d319248864.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1727.915621] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-56305d67-f64b-43c4-b1e4-6d09652c927c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.925791] env[62684]: DEBUG oslo_vmware.api [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052310, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.509529} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.927277] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1727.927646] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Waiting for the task: (returnval){ [ 1727.927646] env[62684]: value = "task-2052311" [ 1727.927646] env[62684]: _type = "Task" [ 1727.927646] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.929036] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28240411-ac4a-4fc0-89a6-9fc7637d70b5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.948055] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1727.975712] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Reconfiguring VM instance instance-00000004 to attach disk [datastore2] 4cbcfa1a-c034-4de7-ad25-4ad22316067e/4cbcfa1a-c034-4de7-ad25-4ad22316067e.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1727.975712] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1727.977915] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d3d147bf-96c6-4eb8-815e-9b12fe167574 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.992899] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052311, 'name': Rename_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.993979] env[62684]: DEBUG nova.network.neutron [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Updating instance_info_cache with network_info: [{"id": "10023d3d-f0cd-49c9-984f-fb3f2af83e3b", "address": "fa:16:3e:2d:b8:1b", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.60", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10023d3d-f0", "ovs_interfaceid": "10023d3d-f0cd-49c9-984f-fb3f2af83e3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1728.000761] env[62684]: DEBUG oslo_vmware.api [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Waiting for the task: (returnval){ [ 1728.000761] env[62684]: value = "task-2052312" [ 1728.000761] env[62684]: _type = "Task" [ 1728.000761] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.009878] env[62684]: DEBUG oslo_vmware.api [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052312, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.069559] env[62684]: DEBUG nova.network.neutron [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1728.147666] env[62684]: DEBUG oslo_vmware.api [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d8d1db-bad3-238f-d441-68e104ca3f5e, 'name': SearchDatastore_Task, 'duration_secs': 0.019986} completed successfully. 
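
The instance_info_cache update above carries the full network_info that the driver later condenses into VIF info: the Neutron port id, MAC address, integration bridge, fixed IPs, the NSX logical-switch id and the tap device name. The sketch below pulls those commonly used fields out of one such entry; the structure is copied from the log, the helper itself is illustrative.

    vif = {
        "id": "10023d3d-f0cd-49c9-984f-fb3f2af83e3b",
        "address": "fa:16:3e:2d:b8:1b",
        "network": {
            "id": "c328aa25-c979-44ef-9fe3-8d0b5013533f",
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.233.0/24",
                "ips": [{"address": "192.168.233.60", "type": "fixed"}],
            }],
        },
        "type": "ovs",
        "details": {"nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432"},
        "devname": "tap10023d3d-f0",
    }

    def summarize_vif(vif):
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"] if ip.get("type") == "fixed"]
        return {
            "port_id": vif["id"],
            "mac": vif["address"],
            "bridge": vif["network"]["bridge"],
            "fixed_ips": fixed_ips,
            "devname": vif["devname"],
            "logical_switch": vif["details"].get("nsx-logical-switch-id"),
        }

    print(summarize_vif(vif))
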
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.147854] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1728.148134] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb/8d53d8c3-6db8-4ebe-a35f-0f64602fafcb.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1728.148384] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f49de681-b957-43b7-b045-be9db74a170d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.154802] env[62684]: DEBUG oslo_vmware.api [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1728.154802] env[62684]: value = "task-2052313" [ 1728.154802] env[62684]: _type = "Task" [ 1728.154802] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.162852] env[62684]: DEBUG oslo_vmware.api [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052313, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.296341] env[62684]: DEBUG nova.compute.manager [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1728.297218] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d7b67b3-729e-4576-a513-d334c45d15cc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.328578] env[62684]: DEBUG nova.compute.utils [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1728.329755] env[62684]: DEBUG nova.compute.manager [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1728.329949] env[62684]: DEBUG nova.network.neutron [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1728.431644] env[62684]: DEBUG nova.policy [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '890eb2142b224f419ea944dff141330d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '131e2a2e3f70470bbcca23e556d09e6f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1728.444226] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052311, 'name': Rename_Task, 'duration_secs': 0.150463} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.444980] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1728.445573] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d95ae09a-4684-4416-a7bb-2c95804838cf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.455074] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Waiting for the task: (returnval){ [ 1728.455074] env[62684]: value = "task-2052314" [ 1728.455074] env[62684]: _type = "Task" [ 1728.455074] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.461421] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052314, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.500253] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Releasing lock "refresh_cache-0676806b-c1f0-4c1a-a12d-add2edf1588f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1728.500589] env[62684]: DEBUG nova.compute.manager [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Instance network_info: |[{"id": "10023d3d-f0cd-49c9-984f-fb3f2af83e3b", "address": "fa:16:3e:2d:b8:1b", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.60", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10023d3d-f0", "ovs_interfaceid": "10023d3d-f0cd-49c9-984f-fb3f2af83e3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1728.501492] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:b8:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ded18042-834c-4792-b3e8-b1c377446432', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '10023d3d-f0cd-49c9-984f-fb3f2af83e3b', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1728.510368] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Creating folder: Project (147d85277da2482db0c24803c664cb93). Parent ref: group-v421118. 
{{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1728.514803] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d430f0de-6dde-4e77-9c75-08cd2c524791 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.517814] env[62684]: DEBUG nova.network.neutron [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Updating instance_info_cache with network_info: [{"id": "45eb32aa-2917-4675-a758-bf202fb0fc08", "address": "fa:16:3e:a5:47:91", "network": {"id": "c580420a-adb8-4862-ab3e-1f6f12d8b3c1", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1344327379-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4058aa2437d84bb49740f062876d66a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45eb32aa-29", "ovs_interfaceid": "45eb32aa-2917-4675-a758-bf202fb0fc08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1728.525693] env[62684]: DEBUG oslo_vmware.api [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052312, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.530588] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Created folder: Project (147d85277da2482db0c24803c664cb93) in parent group-v421118. [ 1728.530863] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Creating folder: Instances. Parent ref: group-v421138. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1728.531183] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-033878f1-560f-40e5-9962-4199ddb1a757 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.541089] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Created folder: Instances in parent group-v421138. 
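
These vm_util entries build the per-tenant folder hierarchy in vCenter: under the OpenStack parent folder (group-v421118) a "Project (<project-id>)" folder is created, and an "Instances" folder beneath it, before the VM itself is created there. The real driver issues Folder.CreateFolder through the oslo.vmware session; the in-memory create-if-missing helper below is only an illustration of the hierarchy, not the driver's code.

    import itertools

    _refs = itertools.count(421138)            # fake moref counter
    folders = {"group-v421118": {}}            # parent ref -> {name: child ref}

    def create_folder_if_missing(parent_ref, name):
        """Return (ref, created); echoes the 'Created folder: X in parent Y' lines."""
        children = folders.setdefault(parent_ref, {})
        if name in children:
            return children[name], False
        ref = "group-v%d" % next(_refs)
        children[name] = ref
        folders[ref] = {}
        return ref, True

    project_ref, _ = create_folder_if_missing(
        "group-v421118", "Project (147d85277da2482db0c24803c664cb93)")
    instances_ref, _ = create_folder_if_missing(project_ref, "Instances")
    print(project_ref, instances_ref)
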
[ 1728.541408] env[62684]: DEBUG oslo.service.loopingcall [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1728.541544] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1728.542646] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-03cb9648-c8d6-4d43-9ca3-5a54154a2e71 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.567680] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1728.567680] env[62684]: value = "task-2052317" [ 1728.567680] env[62684]: _type = "Task" [ 1728.567680] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.577418] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052317, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.672176] env[62684]: DEBUG oslo_vmware.api [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052313, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.720769] env[62684]: DEBUG nova.network.neutron [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Successfully updated port: b9e25052-ec41-470d-b549-89e542cb4366 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1728.810023] env[62684]: INFO nova.compute.manager [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] instance snapshotting [ 1728.813473] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6413892-3d73-4f6b-9226-2c14c3c3bfb5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.846607] env[62684]: DEBUG nova.compute.manager [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1728.851487] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28cc1fcd-8e90-431a-9b03-68f4661f0400 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.966457] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052314, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.016866] env[62684]: DEBUG oslo_vmware.api [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052312, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.024577] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Releasing lock "refresh_cache-91869c00-edd0-40a8-84df-d8842d750558" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1729.024921] env[62684]: DEBUG nova.compute.manager [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Instance network_info: |[{"id": "45eb32aa-2917-4675-a758-bf202fb0fc08", "address": "fa:16:3e:a5:47:91", "network": {"id": "c580420a-adb8-4862-ab3e-1f6f12d8b3c1", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1344327379-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4058aa2437d84bb49740f062876d66a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45eb32aa-29", "ovs_interfaceid": "45eb32aa-2917-4675-a758-bf202fb0fc08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1729.025591] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:47:91', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '11b669be-fb26-4ef8-bdb6-c77ab9d06daf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '45eb32aa-2917-4675-a758-bf202fb0fc08', 
'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1729.034257] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Creating folder: Project (4058aa2437d84bb49740f062876d66a2). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1729.034585] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-df5f07a9-2612-4f3e-a886-d3d024dbb705 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.045667] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Created folder: Project (4058aa2437d84bb49740f062876d66a2) in parent group-v421118. [ 1729.045667] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Creating folder: Instances. Parent ref: group-v421141. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1729.045667] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5ce871e1-eee6-4464-a295-ca3bf7f94e95 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.064834] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Created folder: Instances in parent group-v421141. [ 1729.066181] env[62684]: DEBUG oslo.service.loopingcall [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1729.066181] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1729.066181] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c6f8d3fd-b862-4989-9c1e-82441c05ee79 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.096365] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052317, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.097760] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1729.097760] env[62684]: value = "task-2052320" [ 1729.097760] env[62684]: _type = "Task" [ 1729.097760] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.105849] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052320, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.111485] env[62684]: DEBUG nova.network.neutron [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Successfully created port: eab61e5f-3e13-43bd-8778-1f31e15ef593 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1729.158496] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Acquiring lock "43d28811-26e4-4016-9f82-98349d4a05b7" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1729.158810] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Lock "43d28811-26e4-4016-9f82-98349d4a05b7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1729.171310] env[62684]: DEBUG oslo_concurrency.lockutils [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Acquiring lock "73f27fc0-ebae-41c7-b292-14396f79a5a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1729.171587] env[62684]: DEBUG oslo_concurrency.lockutils [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Lock "73f27fc0-ebae-41c7-b292-14396f79a5a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1729.176671] env[62684]: DEBUG oslo_vmware.api [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052313, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.780884} completed successfully.
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.177107] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb/8d53d8c3-6db8-4ebe-a35f-0f64602fafcb.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1729.177239] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1729.177518] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0bb69818-ef85-419e-a3bc-d1def6460b39 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.184903] env[62684]: DEBUG oslo_vmware.api [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1729.184903] env[62684]: value = "task-2052321" [ 1729.184903] env[62684]: _type = "Task" [ 1729.184903] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.186549] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe23565-0389-4dae-abc4-d2340e058003 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.199489] env[62684]: DEBUG oslo_vmware.api [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052321, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.200476] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f13a1d-9f29-4c3b-8641-e40f8d2813ef {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.239921] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Acquiring lock "refresh_cache-effc673a-103f-413b-88ac-6907ad1ee852" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1729.240057] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Acquired lock "refresh_cache-effc673a-103f-413b-88ac-6907ad1ee852" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1729.240275] env[62684]: DEBUG nova.network.neutron [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1729.242750] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12a7ea6f-a754-4d4a-b8c8-16ed7f42ef5c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.251833] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53114290-502b-40eb-9d1d-905f4e4df1ee {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.268984] env[62684]: DEBUG nova.compute.provider_tree [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1729.365302] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Creating Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1729.365915] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ce0022fc-6e02-4f68-9f0e-d49e675ee2d5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.373765] env[62684]: DEBUG oslo_vmware.api [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Waiting for the task: (returnval){ [ 1729.373765] env[62684]: value = "task-2052322" [ 1729.373765] env[62684]: _type = "Task" [ 1729.373765] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.384262] env[62684]: DEBUG oslo_vmware.api [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052322, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.468200] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052314, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.515257] env[62684]: DEBUG nova.compute.manager [req-1332b3d5-7cbc-4755-a5f9-e28d857a9c60 req-59ff6b44-240b-4756-85cd-afebbe228634 service nova] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Received event network-vif-plugged-10023d3d-f0cd-49c9-984f-fb3f2af83e3b {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1729.515257] env[62684]: DEBUG oslo_concurrency.lockutils [req-1332b3d5-7cbc-4755-a5f9-e28d857a9c60 req-59ff6b44-240b-4756-85cd-afebbe228634 service nova] Acquiring lock "0676806b-c1f0-4c1a-a12d-add2edf1588f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1729.515448] env[62684]: DEBUG oslo_concurrency.lockutils [req-1332b3d5-7cbc-4755-a5f9-e28d857a9c60 req-59ff6b44-240b-4756-85cd-afebbe228634 service nova] Lock "0676806b-c1f0-4c1a-a12d-add2edf1588f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1729.515580] env[62684]: DEBUG oslo_concurrency.lockutils [req-1332b3d5-7cbc-4755-a5f9-e28d857a9c60 req-59ff6b44-240b-4756-85cd-afebbe228634 service nova] Lock "0676806b-c1f0-4c1a-a12d-add2edf1588f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1729.515745] env[62684]: DEBUG nova.compute.manager [req-1332b3d5-7cbc-4755-a5f9-e28d857a9c60 req-59ff6b44-240b-4756-85cd-afebbe228634 service nova] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] No waiting events found dispatching network-vif-plugged-10023d3d-f0cd-49c9-984f-fb3f2af83e3b {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1729.515898] env[62684]: WARNING nova.compute.manager [req-1332b3d5-7cbc-4755-a5f9-e28d857a9c60 req-59ff6b44-240b-4756-85cd-afebbe228634 service nova] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Received unexpected event network-vif-plugged-10023d3d-f0cd-49c9-984f-fb3f2af83e3b for instance with vm_state building and task_state spawning. [ 1729.525151] env[62684]: DEBUG oslo_vmware.api [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052312, 'name': ReconfigVM_Task, 'duration_secs': 1.508149} completed successfully.
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.525151] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Reconfigured VM instance instance-00000004 to attach disk [datastore2] 4cbcfa1a-c034-4de7-ad25-4ad22316067e/4cbcfa1a-c034-4de7-ad25-4ad22316067e.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1729.525750] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e244f214-c6f6-4d24-8c09-1fd36caaae71 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.532801] env[62684]: DEBUG oslo_vmware.api [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Waiting for the task: (returnval){ [ 1729.532801] env[62684]: value = "task-2052323" [ 1729.532801] env[62684]: _type = "Task" [ 1729.532801] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.542628] env[62684]: DEBUG oslo_vmware.api [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052323, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.545077] env[62684]: DEBUG nova.compute.manager [req-f34001ed-0cf1-4eed-ae11-35700499c8fc req-46c75241-2cc7-4813-a220-c13a18d461f6 service nova] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Received event network-vif-plugged-fafc337e-8380-4431-acaa-5ab65e6b32d7 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1729.545271] env[62684]: DEBUG oslo_concurrency.lockutils [req-f34001ed-0cf1-4eed-ae11-35700499c8fc req-46c75241-2cc7-4813-a220-c13a18d461f6 service nova] Acquiring lock "8d53d8c3-6db8-4ebe-a35f-0f64602fafcb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1729.545642] env[62684]: DEBUG oslo_concurrency.lockutils [req-f34001ed-0cf1-4eed-ae11-35700499c8fc req-46c75241-2cc7-4813-a220-c13a18d461f6 service nova] Lock "8d53d8c3-6db8-4ebe-a35f-0f64602fafcb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1729.545837] env[62684]: DEBUG oslo_concurrency.lockutils [req-f34001ed-0cf1-4eed-ae11-35700499c8fc req-46c75241-2cc7-4813-a220-c13a18d461f6 service nova] Lock "8d53d8c3-6db8-4ebe-a35f-0f64602fafcb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1729.546011] env[62684]: DEBUG nova.compute.manager [req-f34001ed-0cf1-4eed-ae11-35700499c8fc req-46c75241-2cc7-4813-a220-c13a18d461f6 service nova] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] No waiting events found dispatching
network-vif-plugged-fafc337e-8380-4431-acaa-5ab65e6b32d7 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1729.546174] env[62684]: WARNING nova.compute.manager [req-f34001ed-0cf1-4eed-ae11-35700499c8fc req-46c75241-2cc7-4813-a220-c13a18d461f6 service nova] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Received unexpected event network-vif-plugged-fafc337e-8380-4431-acaa-5ab65e6b32d7 for instance with vm_state building and task_state spawning. [ 1729.546326] env[62684]: DEBUG nova.compute.manager [req-f34001ed-0cf1-4eed-ae11-35700499c8fc req-46c75241-2cc7-4813-a220-c13a18d461f6 service nova] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Received event network-changed-fafc337e-8380-4431-acaa-5ab65e6b32d7 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1729.546471] env[62684]: DEBUG nova.compute.manager [req-f34001ed-0cf1-4eed-ae11-35700499c8fc req-46c75241-2cc7-4813-a220-c13a18d461f6 service nova] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Refreshing instance network info cache due to event network-changed-fafc337e-8380-4431-acaa-5ab65e6b32d7. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1729.546658] env[62684]: DEBUG oslo_concurrency.lockutils [req-f34001ed-0cf1-4eed-ae11-35700499c8fc req-46c75241-2cc7-4813-a220-c13a18d461f6 service nova] Acquiring lock "refresh_cache-8d53d8c3-6db8-4ebe-a35f-0f64602fafcb" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1729.546791] env[62684]: DEBUG oslo_concurrency.lockutils [req-f34001ed-0cf1-4eed-ae11-35700499c8fc req-46c75241-2cc7-4813-a220-c13a18d461f6 service nova] Acquired lock "refresh_cache-8d53d8c3-6db8-4ebe-a35f-0f64602fafcb" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1729.546951] env[62684]: DEBUG nova.network.neutron [req-f34001ed-0cf1-4eed-ae11-35700499c8fc req-46c75241-2cc7-4813-a220-c13a18d461f6 service nova] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Refreshing network info cache for port fafc337e-8380-4431-acaa-5ab65e6b32d7 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1729.597777] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052317, 'name': CreateVM_Task, 'duration_secs': 0.530808} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.599992] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1729.606603] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1729.606603] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1729.606603] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1729.606603] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07eeff9d-87e5-4a8a-8b09-98eda4926f1e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.619015] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052320, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.619875] env[62684]: DEBUG oslo_vmware.api [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 1729.619875] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f9eb93-4319-1a8a-00c1-30685d1863b8" [ 1729.619875] env[62684]: _type = "Task" [ 1729.619875] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.630862] env[62684]: DEBUG oslo_vmware.api [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f9eb93-4319-1a8a-00c1-30685d1863b8, 'name': SearchDatastore_Task, 'duration_secs': 0.01153} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.631750] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1729.632095] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1729.632305] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1729.632459] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1729.632635] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1729.633457] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-34056320-25fa-4e22-984f-84f60b6c53c7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.666444] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1729.666444] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1729.667341] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af71c6b2-4352-4b1e-9477-c335665bcc96 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.674722] env[62684]: DEBUG oslo_vmware.api [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 1729.674722] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5298fa55-1804-bc1b-bda5-820188c05465" [ 1729.674722] env[62684]: _type = "Task" [ 1729.674722] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.683967] env[62684]: DEBUG oslo_vmware.api [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5298fa55-1804-bc1b-bda5-820188c05465, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.695128] env[62684]: DEBUG oslo_vmware.api [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052321, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.237471} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.695396] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1729.696202] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e52dee1-c144-4045-8391-94c9ab968546 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.721709] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Reconfiguring VM instance instance-00000005 to attach disk [datastore2] 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb/8d53d8c3-6db8-4ebe-a35f-0f64602fafcb.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1729.722405] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-975c1bca-82d4-4ddb-9a59-3bc38e38030b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.746210] env[62684]: DEBUG oslo_vmware.api [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1729.746210] env[62684]: value = "task-2052324" [ 1729.746210] env[62684]: _type = "Task" [ 1729.746210] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.755566] env[62684]: DEBUG oslo_vmware.api [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052324, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.772777] env[62684]: DEBUG nova.scheduler.client.report [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1729.832272] env[62684]: DEBUG nova.network.neutron [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1729.837333] env[62684]: DEBUG oslo_concurrency.lockutils [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Acquiring lock "e4528a29-163d-4f5e-9497-6e6b90b290ba" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1729.837645] env[62684]: DEBUG oslo_concurrency.lockutils [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Lock "e4528a29-163d-4f5e-9497-6e6b90b290ba" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1729.837896] env[62684]: DEBUG oslo_concurrency.lockutils [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Acquiring lock "e4528a29-163d-4f5e-9497-6e6b90b290ba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1729.838144] env[62684]: DEBUG oslo_concurrency.lockutils [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Lock "e4528a29-163d-4f5e-9497-6e6b90b290ba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1729.838399] env[62684]: DEBUG oslo_concurrency.lockutils [None
req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Lock "e4528a29-163d-4f5e-9497-6e6b90b290ba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1729.841983] env[62684]: INFO nova.compute.manager [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Terminating instance [ 1729.844392] env[62684]: DEBUG nova.compute.manager [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1729.844499] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1729.845576] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7824bf3d-1ae2-4b4e-a1ad-66ac40255eba {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.855025] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1729.855649] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e316c57f-c25e-4dc9-b425-ba20605aa9de {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.863715] env[62684]: DEBUG nova.compute.manager [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1729.873333] env[62684]: DEBUG oslo_vmware.api [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Waiting for the task: (returnval){ [ 1729.873333] env[62684]: value = "task-2052325" [ 1729.873333] env[62684]: _type = "Task" [ 1729.873333] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.887658] env[62684]: DEBUG oslo_vmware.api [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Task: {'id': task-2052325, 'name': PowerOffVM_Task} progress is 0%.
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.893917] env[62684]: DEBUG oslo_vmware.api [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052322, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.902018] env[62684]: DEBUG nova.virt.hardware [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1729.902937] env[62684]: DEBUG nova.virt.hardware [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1729.902937] env[62684]: DEBUG nova.virt.hardware [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1729.902937] env[62684]: DEBUG nova.virt.hardware [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1729.903668] env[62684]: DEBUG nova.virt.hardware [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1729.903668] env[62684]: DEBUG nova.virt.hardware [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1729.903668] env[62684]: DEBUG nova.virt.hardware [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1729.903828] env[62684]: DEBUG nova.virt.hardware [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1729.904022] env[62684]: DEBUG nova.virt.hardware [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1729.904235] env[62684]: DEBUG nova.virt.hardware [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1729.904411] env[62684]: DEBUG nova.virt.hardware [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1729.905514] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc7eb9b-767d-4341-956f-4873f936325a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.914393] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0838209-f121-4549-9ec1-fffbb4ffb7d6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.970549] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052314, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.045651] env[62684]: DEBUG oslo_vmware.api [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052323, 'name': Rename_Task, 'duration_secs': 0.298906} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.045651] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1730.045651] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-def756c9-b024-4935-b409-a754cff07085 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.053379] env[62684]: DEBUG oslo_vmware.api [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Waiting for the task: (returnval){ [ 1730.053379] env[62684]: value = "task-2052326" [ 1730.053379] env[62684]: _type = "Task" [ 1730.053379] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.062142] env[62684]: DEBUG oslo_vmware.api [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052326, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.110589] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052320, 'name': CreateVM_Task, 'duration_secs': 0.63077} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.110779] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1730.114518] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1730.114518] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1730.114518] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1730.114518] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dce54ee2-a445-43e7-8f4b-14bad5241982 {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.117957] env[62684]: DEBUG oslo_vmware.api [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Waiting for the task: (returnval){ [ 1730.117957] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ed0f06-a1b3-436a-f107-415ef4f938bb" [ 1730.117957] env[62684]: _type = "Task" [ 1730.117957] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.128158] env[62684]: DEBUG oslo_vmware.api [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ed0f06-a1b3-436a-f107-415ef4f938bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.190749] env[62684]: DEBUG oslo_vmware.api [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5298fa55-1804-bc1b-bda5-820188c05465, 'name': SearchDatastore_Task, 'duration_secs': 0.016564} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.191532] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57c8b501-df24-4c4f-afc7-e58b63d96ffa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.195990] env[62684]: DEBUG nova.network.neutron [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Updating instance_info_cache with network_info: [{"id": "b9e25052-ec41-470d-b549-89e542cb4366", "address": "fa:16:3e:55:a2:89", "network": {"id": "532579c0-d485-4585-bc63-1bbd3af0367a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1836758902-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e2e0ad7001b4b59805c1d6a3a0caf35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9e25052-ec", "ovs_interfaceid": "b9e25052-ec41-470d-b549-89e542cb4366", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1730.201660] env[62684]: DEBUG oslo_vmware.api [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 
tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 1730.201660] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]525bc458-c36f-8238-eeac-15327815365f" [ 1730.201660] env[62684]: _type = "Task" [ 1730.201660] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.211811] env[62684]: DEBUG oslo_vmware.api [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]525bc458-c36f-8238-eeac-15327815365f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.257625] env[62684]: DEBUG oslo_vmware.api [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052324, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.265844] env[62684]: DEBUG oslo_concurrency.lockutils [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "dcb0a5b2-379e-44ff-a9b0-be615943c94e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1730.266439] env[62684]: DEBUG oslo_concurrency.lockutils [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "dcb0a5b2-379e-44ff-a9b0-be615943c94e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1730.278269] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.458s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1730.278414] env[62684]: DEBUG nova.compute.manager [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1730.284036] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 4.810s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1730.388515] env[62684]: DEBUG oslo_vmware.api [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052322, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.393580] env[62684]: DEBUG oslo_vmware.api [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Task: {'id': task-2052325, 'name': PowerOffVM_Task, 'duration_secs': 0.223386} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.393914] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1730.394202] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1730.394385] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b14950bf-6778-490c-8da3-4baed5626b49 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.470665] env[62684]: DEBUG oslo_vmware.api [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052314, 'name': PowerOnVM_Task, 'duration_secs': 1.913221} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.474289] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1730.474839] env[62684]: INFO nova.compute.manager [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Took 16.66 seconds to spawn the instance on the hypervisor. 
[ 1730.475231] env[62684]: DEBUG nova.compute.manager [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1730.476487] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78618596-1a09-482d-981d-216ed80df2e7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.503814] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1730.504159] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1730.504242] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Deleting the datastore file [datastore1] e4528a29-163d-4f5e-9497-6e6b90b290ba {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1730.504588] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b03efff3-0cb5-4772-a22e-00662e5b74e6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.512652] env[62684]: DEBUG oslo_vmware.api [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Waiting for the task: (returnval){ [ 1730.512652] env[62684]: value = "task-2052328" [ 1730.512652] env[62684]: _type = "Task" [ 1730.512652] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.522126] env[62684]: DEBUG oslo_vmware.api [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Task: {'id': task-2052328, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.547875] env[62684]: DEBUG nova.network.neutron [req-f34001ed-0cf1-4eed-ae11-35700499c8fc req-46c75241-2cc7-4813-a220-c13a18d461f6 service nova] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Updated VIF entry in instance network info cache for port fafc337e-8380-4431-acaa-5ab65e6b32d7. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1730.548196] env[62684]: DEBUG nova.network.neutron [req-f34001ed-0cf1-4eed-ae11-35700499c8fc req-46c75241-2cc7-4813-a220-c13a18d461f6 service nova] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Updating instance_info_cache with network_info: [{"id": "fafc337e-8380-4431-acaa-5ab65e6b32d7", "address": "fa:16:3e:f6:3e:99", "network": {"id": "2fa98fa4-ff7c-44e6-add0-693f55fd4b03", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2019954029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7855def9d0aa49abb7003ee504b9ccaf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfafc337e-83", "ovs_interfaceid": "fafc337e-8380-4431-acaa-5ab65e6b32d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1730.568269] env[62684]: DEBUG oslo_vmware.api [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052326, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.634530] env[62684]: DEBUG oslo_vmware.api [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ed0f06-a1b3-436a-f107-415ef4f938bb, 'name': SearchDatastore_Task, 'duration_secs': 0.022994} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.634530] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1730.634530] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1730.634530] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1730.701662] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Releasing lock "refresh_cache-effc673a-103f-413b-88ac-6907ad1ee852" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1730.701860] env[62684]: DEBUG nova.compute.manager [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Instance network_info: |[{"id": "b9e25052-ec41-470d-b549-89e542cb4366", "address": "fa:16:3e:55:a2:89", "network": {"id": "532579c0-d485-4585-bc63-1bbd3af0367a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1836758902-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e2e0ad7001b4b59805c1d6a3a0caf35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9e25052-ec", "ovs_interfaceid": "b9e25052-ec41-470d-b549-89e542cb4366", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1730.702349] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:a2:89', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6f41e4aa-0d23-48c4-a359-574abb2e7b9a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b9e25052-ec41-470d-b549-89e542cb4366', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1730.711903] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Creating folder: Project (9e2e0ad7001b4b59805c1d6a3a0caf35). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1730.716839] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-27eadc22-5d9b-4feb-bb54-87f4e9bf8d69 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.729409] env[62684]: DEBUG oslo_vmware.api [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]525bc458-c36f-8238-eeac-15327815365f, 'name': SearchDatastore_Task, 'duration_secs': 0.017409} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.731122] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1730.731839] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 0676806b-c1f0-4c1a-a12d-add2edf1588f/0676806b-c1f0-4c1a-a12d-add2edf1588f.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1730.732461] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Created folder: Project (9e2e0ad7001b4b59805c1d6a3a0caf35) in parent group-v421118. [ 1730.733590] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Creating folder: Instances. Parent ref: group-v421145. 
{{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1730.733590] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1730.733590] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1730.733590] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-24e21212-4e27-4f73-8532-aa48ce22ded7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.736168] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-23ce199f-6df3-4e09-b165-63aa3b896d37 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.738675] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-19269f76-88da-4380-b3cb-46af8f3cba74 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.748770] env[62684]: DEBUG oslo_vmware.api [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 1730.748770] env[62684]: value = "task-2052331" [ 1730.748770] env[62684]: _type = "Task" [ 1730.748770] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.749688] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1730.749876] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1730.762545] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fca0c22a-a416-44e2-9fe5-f31edd9c918c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.764984] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Created folder: Instances in parent group-v421145. 
[ 1730.765559] env[62684]: DEBUG oslo.service.loopingcall [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1730.765559] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1730.767243] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4e64a04-1c3f-44c9-a263-7ab3000e5ebb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.800035] env[62684]: DEBUG nova.compute.utils [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1730.813831] env[62684]: DEBUG oslo_vmware.api [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Waiting for the task: (returnval){ [ 1730.813831] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f44bd3-1d6b-8e32-12c3-cb6e5256d6d3" [ 1730.813831] env[62684]: _type = "Task" [ 1730.813831] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.813970] env[62684]: DEBUG oslo_vmware.api [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052331, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.816709] env[62684]: DEBUG oslo_vmware.api [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052324, 'name': ReconfigVM_Task, 'duration_secs': 0.687331} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.817500] env[62684]: DEBUG nova.compute.manager [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Not allocating networking since 'none' was specified. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1730.818133] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Reconfigured VM instance instance-00000005 to attach disk [datastore2] 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb/8d53d8c3-6db8-4ebe-a35f-0f64602fafcb.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1730.822517] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fc50c17f-8e5f-4086-b189-90cc62175e53 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.837789] env[62684]: DEBUG oslo_vmware.api [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1730.837789] env[62684]: value = "task-2052332" [ 1730.837789] env[62684]: _type = "Task" [ 1730.837789] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.838110] env[62684]: DEBUG oslo_vmware.api [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f44bd3-1d6b-8e32-12c3-cb6e5256d6d3, 'name': SearchDatastore_Task, 'duration_secs': 0.011784} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.840085] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1730.840085] env[62684]: value = "task-2052333" [ 1730.840085] env[62684]: _type = "Task" [ 1730.840085] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.843378] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-294056a2-5eb7-4092-86b0-663bfc0dcfc9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.859535] env[62684]: DEBUG oslo_vmware.api [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052332, 'name': Rename_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.866299] env[62684]: DEBUG oslo_vmware.api [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Waiting for the task: (returnval){ [ 1730.866299] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f903a4-fdb3-d22f-3e61-8c3f5e485a6a" [ 1730.866299] env[62684]: _type = "Task" [ 1730.866299] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.866981] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052333, 'name': CreateVM_Task} progress is 10%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.879523] env[62684]: DEBUG oslo_vmware.api [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f903a4-fdb3-d22f-3e61-8c3f5e485a6a, 'name': SearchDatastore_Task, 'duration_secs': 0.012565} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.880008] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1730.880346] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 91869c00-edd0-40a8-84df-d8842d750558/91869c00-edd0-40a8-84df-d8842d750558.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1730.884105] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d808f315-3dca-4aae-ab66-872045faa4c1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.894113] env[62684]: DEBUG oslo_vmware.api [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052322, 'name': CreateSnapshot_Task, 'duration_secs': 1.072367} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.894507] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Created Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1730.894801] env[62684]: DEBUG oslo_vmware.api [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Waiting for the task: (returnval){ [ 1730.894801] env[62684]: value = "task-2052334" [ 1730.894801] env[62684]: _type = "Task" [ 1730.894801] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.895558] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccd6a842-5879-44f0-bcd3-97a4fc70192a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.913811] env[62684]: DEBUG oslo_vmware.api [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Task: {'id': task-2052334, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.008034] env[62684]: INFO nova.compute.manager [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Took 24.11 seconds to build instance. [ 1731.035991] env[62684]: DEBUG oslo_vmware.api [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Task: {'id': task-2052328, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.35378} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.035991] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1731.035991] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1731.035991] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1731.035991] env[62684]: INFO nova.compute.manager [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1731.036348] env[62684]: DEBUG oslo.service.loopingcall [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1731.036534] env[62684]: DEBUG nova.compute.manager [-] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1731.040046] env[62684]: DEBUG nova.network.neutron [-] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1731.052906] env[62684]: DEBUG oslo_concurrency.lockutils [req-f34001ed-0cf1-4eed-ae11-35700499c8fc req-46c75241-2cc7-4813-a220-c13a18d461f6 service nova] Releasing lock "refresh_cache-8d53d8c3-6db8-4ebe-a35f-0f64602fafcb" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1731.052906] env[62684]: DEBUG nova.compute.manager [req-f34001ed-0cf1-4eed-ae11-35700499c8fc req-46c75241-2cc7-4813-a220-c13a18d461f6 service nova] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Received event network-vif-plugged-45eb32aa-2917-4675-a758-bf202fb0fc08 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1731.052906] env[62684]: DEBUG oslo_concurrency.lockutils [req-f34001ed-0cf1-4eed-ae11-35700499c8fc req-46c75241-2cc7-4813-a220-c13a18d461f6 service nova] Acquiring lock "91869c00-edd0-40a8-84df-d8842d750558-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1731.052906] env[62684]: DEBUG oslo_concurrency.lockutils [req-f34001ed-0cf1-4eed-ae11-35700499c8fc req-46c75241-2cc7-4813-a220-c13a18d461f6 service nova] Lock "91869c00-edd0-40a8-84df-d8842d750558-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1731.052906] env[62684]: DEBUG oslo_concurrency.lockutils [req-f34001ed-0cf1-4eed-ae11-35700499c8fc req-46c75241-2cc7-4813-a220-c13a18d461f6 service nova] Lock "91869c00-edd0-40a8-84df-d8842d750558-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.053213] env[62684]: DEBUG nova.compute.manager [req-f34001ed-0cf1-4eed-ae11-35700499c8fc req-46c75241-2cc7-4813-a220-c13a18d461f6 service nova] [instance: 91869c00-edd0-40a8-84df-d8842d750558] No waiting events found dispatching network-vif-plugged-45eb32aa-2917-4675-a758-bf202fb0fc08 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1731.053213] env[62684]: WARNING nova.compute.manager [req-f34001ed-0cf1-4eed-ae11-35700499c8fc req-46c75241-2cc7-4813-a220-c13a18d461f6 service nova] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Received unexpected event network-vif-plugged-45eb32aa-2917-4675-a758-bf202fb0fc08 for instance with vm_state building and task_state spawning. [ 1731.072837] env[62684]: DEBUG oslo_vmware.api [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052326, 'name': PowerOnVM_Task, 'duration_secs': 0.622595} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.072837] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1731.072837] env[62684]: INFO nova.compute.manager [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Took 14.92 seconds to spawn the instance on the hypervisor. [ 1731.072837] env[62684]: DEBUG nova.compute.manager [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1731.073346] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e0cde03-afd4-4fbb-a279-45179b67215e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.264782] env[62684]: DEBUG oslo_vmware.api [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052331, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.291413] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Acquiring lock "5bc73032-45f9-4b5c-a4ea-e07c48e4f82b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1731.291670] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Lock "5bc73032-45f9-4b5c-a4ea-e07c48e4f82b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1731.316382] env[62684]: DEBUG nova.compute.manager [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1731.358780] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance e4528a29-163d-4f5e-9497-6e6b90b290ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1731.358780] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 0f9a525c-09b9-483e-b418-fea6e6e5dc4a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1731.358780] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance c1580c72-9345-436e-b4f7-56d319248864 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1731.358780] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 4cbcfa1a-c034-4de7-ad25-4ad22316067e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1731.358945] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1731.358945] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 0676806b-c1f0-4c1a-a12d-add2edf1588f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1731.358945] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 91869c00-edd0-40a8-84df-d8842d750558 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1731.358945] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance effc673a-103f-413b-88ac-6907ad1ee852 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1731.359092] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance fb7f38a0-bcfa-4d96-bde3-20d6f1d70112 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1731.359092] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance e1540aa6-12a4-4cff-a444-d47ee66c78d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1731.367751] env[62684]: DEBUG oslo_vmware.api [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052332, 'name': Rename_Task, 'duration_secs': 0.155792} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.371295] env[62684]: DEBUG nova.network.neutron [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Successfully updated port: eab61e5f-3e13-43bd-8778-1f31e15ef593 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1731.372783] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1731.377506] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6399451d-618d-452f-94e9-8e3ebd8405a0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.380949] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052333, 'name': CreateVM_Task, 'duration_secs': 0.411436} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.380949] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1731.380949] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1731.381129] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1731.381418] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1731.381671] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5907bb8-8e27-4362-84c8-8ad12fc2e373 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.386831] env[62684]: DEBUG oslo_vmware.api [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1731.386831] env[62684]: value = "task-2052335" [ 1731.386831] env[62684]: _type = "Task" [ 1731.386831] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.408133] env[62684]: DEBUG oslo_vmware.api [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052335, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.408133] env[62684]: DEBUG oslo_vmware.api [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Waiting for the task: (returnval){ [ 1731.408133] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522dba23-1b51-6281-7e85-d24150fb1ac6" [ 1731.408133] env[62684]: _type = "Task" [ 1731.408133] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.424139] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Creating linked-clone VM from snapshot {{(pid=62684) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1731.428492] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2abe91f3-fe9f-4453-90d9-3b67a02bce76 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.432996] env[62684]: DEBUG oslo_vmware.api [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Task: {'id': task-2052334, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.438839] env[62684]: DEBUG oslo_vmware.api [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522dba23-1b51-6281-7e85-d24150fb1ac6, 'name': SearchDatastore_Task, 'duration_secs': 0.038773} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.441134] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1731.441134] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1731.441134] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1731.441266] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1731.441924] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Creating directory with path 
[datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1731.441924] env[62684]: DEBUG oslo_vmware.api [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Waiting for the task: (returnval){ [ 1731.441924] env[62684]: value = "task-2052336" [ 1731.441924] env[62684]: _type = "Task" [ 1731.441924] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.442182] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0338b252-215c-443d-b40e-866e8f11e49d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.457609] env[62684]: DEBUG oslo_vmware.api [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052336, 'name': CloneVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.459850] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1731.459995] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1731.460754] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ceae433-7772-406b-95a3-07ffa206e207 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.479964] env[62684]: DEBUG oslo_vmware.api [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Waiting for the task: (returnval){ [ 1731.479964] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529d9e52-35dc-5a44-96a8-fe276c98bf97" [ 1731.479964] env[62684]: _type = "Task" [ 1731.479964] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.489706] env[62684]: DEBUG oslo_vmware.api [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529d9e52-35dc-5a44-96a8-fe276c98bf97, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.512656] env[62684]: DEBUG oslo_concurrency.lockutils [None req-033f3890-1fa7-43f0-a377-30046bc89af7 tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Lock "c1580c72-9345-436e-b4f7-56d319248864" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.627s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.595247] env[62684]: INFO nova.compute.manager [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Took 23.61 seconds to build instance. [ 1731.761019] env[62684]: DEBUG oslo_vmware.api [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052331, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.565538} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.761019] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 0676806b-c1f0-4c1a-a12d-add2edf1588f/0676806b-c1f0-4c1a-a12d-add2edf1588f.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1731.761019] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1731.761019] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d549d903-3d2d-4fd0-ba30-18bf295611b5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.771200] env[62684]: DEBUG oslo_vmware.api [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 1731.771200] env[62684]: value = "task-2052337" [ 1731.771200] env[62684]: _type = "Task" [ 1731.771200] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.781083] env[62684]: DEBUG oslo_vmware.api [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052337, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.879027] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 6b1f0e69-3915-40dc-b4ec-93ab174f12b6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1731.882441] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Acquiring lock "refresh_cache-fb7f38a0-bcfa-4d96-bde3-20d6f1d70112" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1731.882441] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Acquired lock "refresh_cache-fb7f38a0-bcfa-4d96-bde3-20d6f1d70112" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1731.882441] env[62684]: DEBUG nova.network.neutron [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1731.902785] env[62684]: DEBUG oslo_vmware.api [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052335, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.922297] env[62684]: DEBUG oslo_vmware.api [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Task: {'id': task-2052334, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.837037} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.922297] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 91869c00-edd0-40a8-84df-d8842d750558/91869c00-edd0-40a8-84df-d8842d750558.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1731.922297] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1731.922297] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-97f28a82-778e-4d17-a1b0-27591133b000 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.927566] env[62684]: DEBUG oslo_vmware.api [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Waiting for the task: (returnval){ [ 1731.927566] env[62684]: value = "task-2052338" [ 1731.927566] env[62684]: _type = "Task" [ 1731.927566] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.940467] env[62684]: DEBUG oslo_vmware.api [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Task: {'id': task-2052338, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.955312] env[62684]: DEBUG oslo_vmware.api [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052336, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.991699] env[62684]: DEBUG oslo_vmware.api [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529d9e52-35dc-5a44-96a8-fe276c98bf97, 'name': SearchDatastore_Task, 'duration_secs': 0.041507} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.992099] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66b06178-2cda-4693-8652-93e31fc5a6e8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.998231] env[62684]: DEBUG oslo_vmware.api [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Waiting for the task: (returnval){ [ 1731.998231] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52693798-89c7-4fd2-82ea-99e75a598f64" [ 1731.998231] env[62684]: _type = "Task" [ 1731.998231] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.007419] env[62684]: DEBUG oslo_vmware.api [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52693798-89c7-4fd2-82ea-99e75a598f64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.018046] env[62684]: DEBUG nova.compute.manager [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1732.097089] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8304db6e-5a2c-40c2-98e9-7e1823502867 tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Lock "4cbcfa1a-c034-4de7-ad25-4ad22316067e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.124s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1732.286116] env[62684]: DEBUG oslo_vmware.api [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052337, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10478} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.286116] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1732.286544] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ae58586-1897-4e21-b581-74d04bdf7e75 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.314171] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] 0676806b-c1f0-4c1a-a12d-add2edf1588f/0676806b-c1f0-4c1a-a12d-add2edf1588f.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1732.314610] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b2ec568-418c-4e08-9282-71959f763144 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.334270] env[62684]: DEBUG nova.compute.manager [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1732.338401] env[62684]: DEBUG oslo_vmware.api [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 1732.338401] env[62684]: value = "task-2052339" [ 1732.338401] env[62684]: _type = "Task" [ 1732.338401] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.353015] env[62684]: DEBUG oslo_vmware.api [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052339, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.384061] env[62684]: DEBUG nova.virt.hardware [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1732.384061] env[62684]: DEBUG nova.virt.hardware [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1732.384061] env[62684]: DEBUG nova.virt.hardware [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1732.384476] env[62684]: DEBUG nova.virt.hardware [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1732.384476] env[62684]: DEBUG nova.virt.hardware [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1732.384476] env[62684]: DEBUG nova.virt.hardware [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1732.384476] env[62684]: DEBUG nova.virt.hardware [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1732.384664] env[62684]: DEBUG nova.virt.hardware [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
1732.384839] env[62684]: DEBUG nova.virt.hardware [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1732.384999] env[62684]: DEBUG nova.virt.hardware [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1732.385183] env[62684]: DEBUG nova.virt.hardware [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1732.385915] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance d532b5fa-90a3-4f25-8684-4eabaa432c86 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1732.393791] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deeff529-855e-4cf3-a4d0-6755b24c80eb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.411021] env[62684]: DEBUG oslo_vmware.api [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052335, 'name': PowerOnVM_Task, 'duration_secs': 0.878438} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.413309] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1732.413505] env[62684]: INFO nova.compute.manager [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Took 11.97 seconds to spawn the instance on the hypervisor. 
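The nova.virt.hardware entries above trace CPU-topology selection for the m1.nano flavor: with no limits set on the flavor or image, the maxima default to 65536 sockets/cores/threads, the single vCPU is factored into every (sockets, cores, threads) triple that fits, and the only candidate is 1:1:1. A minimal sketch of that enumeration step, under the assumption that it reduces to a divisor search (names below are illustrative, not Nova's actual helpers):

    from typing import List, NamedTuple

    class VirtCPUTopology(NamedTuple):
        sockets: int
        cores: int
        threads: int

    def possible_topologies(vcpus: int,
                            max_sockets: int = 65536,
                            max_cores: int = 65536,
                            max_threads: int = 65536) -> List[VirtCPUTopology]:
        # Enumerate every (sockets, cores, threads) triple whose product is
        # exactly vcpus and that respects the per-dimension limits.  This is a
        # simplified stand-in for the "Build topologies for N vcpu(s)" step
        # logged above, not the real nova.virt.hardware implementation.
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    found.append(VirtCPUTopology(sockets, cores, threads))
        return found

For vcpus=1 this yields only VirtCPUTopology(sockets=1, cores=1, threads=1), matching the "Got 1 possible topologies" line above.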
[ 1732.413712] env[62684]: DEBUG nova.compute.manager [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1732.414822] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd22042e-546e-44ae-b86e-a70259a9919c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.420176] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63878f33-8fc0-4250-9972-32cacf715d33 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.442140] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Instance VIF info [] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1732.447667] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Creating folder: Project (754714aa35f1458b8ab43d5d1510c711). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1732.448364] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-493461ed-29dd-40ef-9af1-e6e46d9a3982 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.458287] env[62684]: DEBUG nova.network.neutron [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1732.462637] env[62684]: DEBUG oslo_vmware.api [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Task: {'id': task-2052338, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083267} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.462637] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1732.462637] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a16723-95f8-4f1c-bdb0-df44520d2767 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.468528] env[62684]: DEBUG nova.network.neutron [-] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1732.469735] env[62684]: DEBUG oslo_vmware.api [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052336, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.471397] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Created folder: Project (754714aa35f1458b8ab43d5d1510c711) in parent group-v421118. [ 1732.471567] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Creating folder: Instances. Parent ref: group-v421149. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1732.472074] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0d3f9b5c-a6c2-47a7-9b30-bebf6087655e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.495022] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] 91869c00-edd0-40a8-84df-d8842d750558/91869c00-edd0-40a8-84df-d8842d750558.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1732.495022] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90b42aa5-ae6c-41ec-ac22-24d4fcbb8bb4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.516175] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Created folder: Instances in parent group-v421149. 
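Nearly every entry in this stretch follows the same request/poll rhythm: a vSphere method is invoked (Folder.CreateFolder, CloneVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, CreateVM_Task), the call returns a task handle, and the caller polls it until it reports success, which is what the repeated "Task: {...} progress is N%" lines followed by "completed successfully" reflect. A schematic of that polling loop, using placeholder names (get_task_info, TaskFailed) rather than the real oslo.vmware API:

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
        # get_task_info(task_ref) is assumed to return an object carrying
        # .state in {'queued', 'running', 'success', 'error'}, a .progress
        # percentage and an .error message -- a stand-in for the TaskInfo
        # objects the log entries above are polling.
        while True:
            info = get_task_info(task_ref)
            if info.state == 'success':
                return info
            if info.state == 'error':
                raise TaskFailed(info.error)
            # Still queued/running: surface progress, as in the
            # "progress is N%" lines, then poll again.
            print(f"Task {task_ref}: {info.state}, progress {info.progress}%")
            time.sleep(poll_interval)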
[ 1732.516175] env[62684]: DEBUG oslo.service.loopingcall [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1732.519208] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1732.519913] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cc28fabe-76fe-4701-ab09-3f2ef1df761e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.535532] env[62684]: DEBUG oslo_vmware.api [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Waiting for the task: (returnval){ [ 1732.535532] env[62684]: value = "task-2052342" [ 1732.535532] env[62684]: _type = "Task" [ 1732.535532] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.541750] env[62684]: DEBUG oslo_vmware.api [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52693798-89c7-4fd2-82ea-99e75a598f64, 'name': SearchDatastore_Task, 'duration_secs': 0.015445} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.545431] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1732.546231] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] effc673a-103f-413b-88ac-6907ad1ee852/effc673a-103f-413b-88ac-6907ad1ee852.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1732.547216] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e983f2f5-4b23-437a-b9d3-a7297cd0444f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.549027] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1732.549027] env[62684]: value = "task-2052343" [ 1732.549027] env[62684]: _type = "Task" [ 1732.549027] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.555835] env[62684]: DEBUG oslo_vmware.api [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Task: {'id': task-2052342, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.557491] env[62684]: DEBUG oslo_vmware.api [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Waiting for the task: (returnval){ [ 1732.557491] env[62684]: value = "task-2052344" [ 1732.557491] env[62684]: _type = "Task" [ 1732.557491] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.558358] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1732.565039] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052343, 'name': CreateVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.572885] env[62684]: DEBUG oslo_vmware.api [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2052344, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.600769] env[62684]: DEBUG nova.compute.manager [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1732.776253] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Acquiring lock "aec16a15-5d75-4ea6-800b-1bf67f762d89" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1732.776504] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Lock "aec16a15-5d75-4ea6-800b-1bf67f762d89" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1732.857340] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "06751c34-0724-44ba-a263-ad27fcf2920f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1732.857340] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "06751c34-0724-44ba-a263-ad27fcf2920f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1732.866414] env[62684]: DEBUG oslo_vmware.api [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052339, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.901816] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 43d28811-26e4-4016-9f82-98349d4a05b7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1732.963168] env[62684]: INFO nova.compute.manager [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Took 24.60 seconds to build instance. [ 1732.970272] env[62684]: DEBUG oslo_vmware.api [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052336, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.973052] env[62684]: INFO nova.compute.manager [-] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Took 1.94 seconds to deallocate network for instance. [ 1733.054850] env[62684]: DEBUG oslo_vmware.api [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Task: {'id': task-2052342, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.056263] env[62684]: DEBUG nova.network.neutron [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Updating instance_info_cache with network_info: [{"id": "eab61e5f-3e13-43bd-8778-1f31e15ef593", "address": "fa:16:3e:a5:6a:61", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.238", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeab61e5f-3e", "ovs_interfaceid": "eab61e5f-3e13-43bd-8778-1f31e15ef593", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1733.078113] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052343, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.078395] env[62684]: DEBUG oslo_vmware.api [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2052344, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.134939] env[62684]: DEBUG oslo_concurrency.lockutils [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1733.193610] env[62684]: DEBUG nova.compute.manager [req-8b1f7560-046d-48b0-8eb7-b84f274d08a9 req-6a6c315e-bd37-4951-bd07-143e8066d0c3 service nova] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Received event network-changed-10023d3d-f0cd-49c9-984f-fb3f2af83e3b {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1733.195027] env[62684]: DEBUG nova.compute.manager [req-8b1f7560-046d-48b0-8eb7-b84f274d08a9 req-6a6c315e-bd37-4951-bd07-143e8066d0c3 service nova] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Refreshing instance network info cache due to event network-changed-10023d3d-f0cd-49c9-984f-fb3f2af83e3b. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1733.195902] env[62684]: DEBUG oslo_concurrency.lockutils [req-8b1f7560-046d-48b0-8eb7-b84f274d08a9 req-6a6c315e-bd37-4951-bd07-143e8066d0c3 service nova] Acquiring lock "refresh_cache-0676806b-c1f0-4c1a-a12d-add2edf1588f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1733.195902] env[62684]: DEBUG oslo_concurrency.lockutils [req-8b1f7560-046d-48b0-8eb7-b84f274d08a9 req-6a6c315e-bd37-4951-bd07-143e8066d0c3 service nova] Acquired lock "refresh_cache-0676806b-c1f0-4c1a-a12d-add2edf1588f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1733.195902] env[62684]: DEBUG nova.network.neutron [req-8b1f7560-046d-48b0-8eb7-b84f274d08a9 req-6a6c315e-bd37-4951-bd07-143e8066d0c3 service nova] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Refreshing network info cache for port 10023d3d-f0cd-49c9-984f-fb3f2af83e3b {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1733.228928] env[62684]: DEBUG nova.compute.manager [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Received event network-changed-45eb32aa-2917-4675-a758-bf202fb0fc08 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1733.229879] env[62684]: DEBUG nova.compute.manager [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Refreshing instance network info cache due to event network-changed-45eb32aa-2917-4675-a758-bf202fb0fc08. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1733.230294] env[62684]: DEBUG oslo_concurrency.lockutils [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] Acquiring lock "refresh_cache-91869c00-edd0-40a8-84df-d8842d750558" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1733.230294] env[62684]: DEBUG oslo_concurrency.lockutils [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] Acquired lock "refresh_cache-91869c00-edd0-40a8-84df-d8842d750558" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1733.230457] env[62684]: DEBUG nova.network.neutron [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Refreshing network info cache for port 45eb32aa-2917-4675-a758-bf202fb0fc08 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1733.352708] env[62684]: DEBUG oslo_vmware.api [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052339, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.404963] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 73f27fc0-ebae-41c7-b292-14396f79a5a2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1733.467840] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cea66a44-b6df-48e7-8cc7-54ac883ea290 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "8d53d8c3-6db8-4ebe-a35f-0f64602fafcb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.121s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1733.468281] env[62684]: DEBUG oslo_vmware.api [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052336, 'name': CloneVM_Task} progress is 95%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.482686] env[62684]: DEBUG oslo_concurrency.lockutils [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1733.554614] env[62684]: DEBUG oslo_vmware.api [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Task: {'id': task-2052342, 'name': ReconfigVM_Task, 'duration_secs': 0.977742} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.555033] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Reconfigured VM instance instance-00000007 to attach disk [datastore2] 91869c00-edd0-40a8-84df-d8842d750558/91869c00-edd0-40a8-84df-d8842d750558.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1733.559596] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a279541e-2b43-470a-9fbd-a5398a065220 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.563183] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Releasing lock "refresh_cache-fb7f38a0-bcfa-4d96-bde3-20d6f1d70112" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1733.563532] env[62684]: DEBUG nova.compute.manager [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Instance network_info: |[{"id": "eab61e5f-3e13-43bd-8778-1f31e15ef593", "address": "fa:16:3e:a5:6a:61", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.238", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeab61e5f-3e", "ovs_interfaceid": "eab61e5f-3e13-43bd-8778-1f31e15ef593", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1733.567535] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:6a:61', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ded18042-834c-4792-b3e8-b1c377446432', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eab61e5f-3e13-43bd-8778-1f31e15ef593', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1733.574812] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Creating folder: Project (131e2a2e3f70470bbcca23e556d09e6f). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1733.577983] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052343, 'name': CreateVM_Task, 'duration_secs': 0.546203} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.580382] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3502cb78-65a9-4449-bcbc-1894787e1b07 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.581877] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1733.582205] env[62684]: DEBUG oslo_vmware.api [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Waiting for the task: (returnval){ [ 1733.582205] env[62684]: value = "task-2052345" [ 1733.582205] env[62684]: _type = "Task" [ 1733.582205] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.583052] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1733.583255] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1733.583568] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1733.584182] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de9e6aed-9508-4220-b0e7-0c0565520e52 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.591803] env[62684]: DEBUG oslo_vmware.api [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2052344, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.767941} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.593522] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] effc673a-103f-413b-88ac-6907ad1ee852/effc673a-103f-413b-88ac-6907ad1ee852.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1733.593743] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1733.593992] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Created folder: Project (131e2a2e3f70470bbcca23e556d09e6f) in parent group-v421118. [ 1733.594652] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Creating folder: Instances. Parent ref: group-v421152. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1733.594652] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-56b926cd-c3b7-4be2-8830-0e6644b1015e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.596288] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-89cd7275-3aee-4a29-9d2e-cf6d5b8604a9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.602050] env[62684]: DEBUG oslo_vmware.api [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Task: {'id': task-2052345, 'name': Rename_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.602338] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Waiting for the task: (returnval){ [ 1733.602338] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526da15a-a977-3353-fd4e-d6590c850814" [ 1733.602338] env[62684]: _type = "Task" [ 1733.602338] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.607541] env[62684]: DEBUG oslo_vmware.api [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Waiting for the task: (returnval){ [ 1733.607541] env[62684]: value = "task-2052347" [ 1733.607541] env[62684]: _type = "Task" [ 1733.607541] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.615495] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526da15a-a977-3353-fd4e-d6590c850814, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.615768] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Created folder: Instances in parent group-v421152. [ 1733.616013] env[62684]: DEBUG oslo.service.loopingcall [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1733.616559] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1733.616777] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-054e676d-4e7a-470b-b789-b2526b59df25 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.635375] env[62684]: DEBUG oslo_vmware.api [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2052347, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.640290] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1733.640290] env[62684]: value = "task-2052349" [ 1733.640290] env[62684]: _type = "Task" [ 1733.640290] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.648317] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052349, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.854795] env[62684]: DEBUG oslo_vmware.api [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052339, 'name': ReconfigVM_Task, 'duration_secs': 1.166034} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.855145] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Reconfigured VM instance instance-00000006 to attach disk [datastore2] 0676806b-c1f0-4c1a-a12d-add2edf1588f/0676806b-c1f0-4c1a-a12d-add2edf1588f.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1733.855801] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8995884f-7823-454d-9bad-ffc267d6b442 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.863617] env[62684]: DEBUG oslo_vmware.api [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 1733.863617] env[62684]: value = "task-2052350" [ 1733.863617] env[62684]: _type = "Task" [ 1733.863617] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.876168] env[62684]: DEBUG oslo_vmware.api [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052350, 'name': Rename_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.907913] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance dcb0a5b2-379e-44ff-a9b0-be615943c94e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1733.971492] env[62684]: DEBUG nova.compute.manager [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1733.978081] env[62684]: DEBUG oslo_vmware.api [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052336, 'name': CloneVM_Task, 'duration_secs': 2.399467} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.978709] env[62684]: INFO nova.virt.vmwareapi.vmops [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Created linked-clone VM from snapshot [ 1733.981153] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3170f334-f9ba-407d-8350-4722a6a16ef7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.991985] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Uploading image a24e3d0e-c7d5-4604-9590-4de2389d27ca {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1734.028870] env[62684]: DEBUG oslo_vmware.rw_handles [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1734.028870] env[62684]: value = "vm-421148" [ 1734.028870] env[62684]: _type = "VirtualMachine" [ 1734.028870] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1734.030024] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6e26248a-3f2a-4a6b-9691-3835f89eaacb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.040674] env[62684]: DEBUG oslo_vmware.rw_handles [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Lease: (returnval){ [ 1734.040674] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521d02a3-a872-f411-9b5d-853cfb17db76" [ 1734.040674] env[62684]: _type = "HttpNfcLease" [ 1734.040674] env[62684]: } obtained for exporting VM: (result){ [ 1734.040674] env[62684]: value = "vm-421148" [ 1734.040674] env[62684]: _type = "VirtualMachine" [ 1734.040674] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1734.040674] env[62684]: DEBUG oslo_vmware.api [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Waiting for the lease: (returnval){ [ 1734.040674] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521d02a3-a872-f411-9b5d-853cfb17db76" [ 1734.040674] env[62684]: _type = "HttpNfcLease" [ 1734.040674] env[62684]: } to be ready. {{(pid=62684) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1734.050402] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1734.050402] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521d02a3-a872-f411-9b5d-853cfb17db76" [ 1734.050402] env[62684]: _type = "HttpNfcLease" [ 1734.050402] env[62684]: } is initializing. 
{{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1734.095452] env[62684]: DEBUG oslo_vmware.api [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Task: {'id': task-2052345, 'name': Rename_Task, 'duration_secs': 0.34741} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.095892] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1734.096071] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d6a3cff4-7e80-4da9-bdff-0e0abaf391bf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.102739] env[62684]: DEBUG oslo_vmware.api [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Waiting for the task: (returnval){ [ 1734.102739] env[62684]: value = "task-2052352" [ 1734.102739] env[62684]: _type = "Task" [ 1734.102739] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.119352] env[62684]: DEBUG oslo_vmware.api [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Task: {'id': task-2052352, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.124133] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526da15a-a977-3353-fd4e-d6590c850814, 'name': SearchDatastore_Task, 'duration_secs': 0.019399} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.124576] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1734.124921] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1734.124966] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1734.125138] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1734.125311] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1734.128796] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f48aff3d-66c6-4b3f-968f-a611f71e5688 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.130857] env[62684]: DEBUG oslo_vmware.api [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2052347, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.228446} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.131130] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1734.132294] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0ecb050-bade-4410-8c98-1acd9a817eca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.145750] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1734.145936] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1734.156394] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0acf9856-52a6-4a51-81a4-a2005fa9599e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.172961] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] effc673a-103f-413b-88ac-6907ad1ee852/effc673a-103f-413b-88ac-6907ad1ee852.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1734.173865] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bfcebd66-c8b0-41dd-a63b-e2642c16a5c4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.195091] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052349, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.197905] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Waiting for the task: (returnval){ [ 1734.197905] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]525b4e0d-3d5d-dd4f-979b-a215ef43982a" [ 1734.197905] env[62684]: _type = "Task" [ 1734.197905] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.202895] env[62684]: DEBUG oslo_vmware.api [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Waiting for the task: (returnval){ [ 1734.202895] env[62684]: value = "task-2052353" [ 1734.202895] env[62684]: _type = "Task" [ 1734.202895] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.209296] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]525b4e0d-3d5d-dd4f-979b-a215ef43982a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.215212] env[62684]: DEBUG oslo_vmware.api [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2052353, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.378025] env[62684]: DEBUG oslo_vmware.api [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052350, 'name': Rename_Task, 'duration_secs': 0.301248} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.379668] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1734.379668] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56b7bae9-36b3-40a1-a8da-ce21e2ff4cc9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.386345] env[62684]: DEBUG oslo_vmware.api [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 1734.386345] env[62684]: value = "task-2052354" [ 1734.386345] env[62684]: _type = "Task" [ 1734.386345] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.391550] env[62684]: DEBUG nova.network.neutron [req-8b1f7560-046d-48b0-8eb7-b84f274d08a9 req-6a6c315e-bd37-4951-bd07-143e8066d0c3 service nova] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Updated VIF entry in instance network info cache for port 10023d3d-f0cd-49c9-984f-fb3f2af83e3b. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1734.392434] env[62684]: DEBUG nova.network.neutron [req-8b1f7560-046d-48b0-8eb7-b84f274d08a9 req-6a6c315e-bd37-4951-bd07-143e8066d0c3 service nova] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Updating instance_info_cache with network_info: [{"id": "10023d3d-f0cd-49c9-984f-fb3f2af83e3b", "address": "fa:16:3e:2d:b8:1b", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.60", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10023d3d-f0", "ovs_interfaceid": "10023d3d-f0cd-49c9-984f-fb3f2af83e3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1734.399814] env[62684]: DEBUG oslo_vmware.api [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052354, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.413030] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1734.413030] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1734.413030] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1734.506322] env[62684]: DEBUG oslo_concurrency.lockutils [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1734.567192] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1734.567192] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521d02a3-a872-f411-9b5d-853cfb17db76" [ 1734.567192] env[62684]: _type = "HttpNfcLease" [ 1734.567192] env[62684]: } is ready. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1734.567192] env[62684]: DEBUG oslo_vmware.rw_handles [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1734.567192] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521d02a3-a872-f411-9b5d-853cfb17db76" [ 1734.567192] env[62684]: _type = "HttpNfcLease" [ 1734.567192] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1734.572315] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fefcbf5a-258f-4b14-88d9-9d193e4a7b2c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.582507] env[62684]: DEBUG oslo_vmware.rw_handles [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e17ca3-2e62-e1a6-70cc-ea4180581025/disk-0.vmdk from lease info. {{(pid=62684) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1734.582507] env[62684]: DEBUG oslo_vmware.rw_handles [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e17ca3-2e62-e1a6-70cc-ea4180581025/disk-0.vmdk for reading. 
{{(pid=62684) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1734.659693] env[62684]: DEBUG oslo_vmware.api [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Task: {'id': task-2052352, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.663526] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052349, 'name': CreateVM_Task, 'duration_secs': 0.607324} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.663851] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1734.664501] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1734.664728] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1734.665072] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1734.665571] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3c3e223-9956-4f92-aa8e-045d0c846934 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.673930] env[62684]: DEBUG oslo_vmware.api [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Waiting for the task: (returnval){ [ 1734.673930] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52cf6752-eed2-e1ce-5e01-1fd52703753f" [ 1734.673930] env[62684]: _type = "Task" [ 1734.673930] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.701223] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-30ef888d-13b5-4610-841c-c7401b103f31 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.715659] env[62684]: DEBUG oslo_vmware.api [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52cf6752-eed2-e1ce-5e01-1fd52703753f, 'name': SearchDatastore_Task, 'duration_secs': 0.011053} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.722264] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1734.722264] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1734.722264] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1734.728286] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]525b4e0d-3d5d-dd4f-979b-a215ef43982a, 'name': SearchDatastore_Task, 'duration_secs': 0.030651} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.733607] env[62684]: DEBUG oslo_vmware.api [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2052353, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.733607] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a03bd92e-7878-4119-9770-93de7e6945be {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.743802] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Waiting for the task: (returnval){ [ 1734.743802] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5267a1fd-3a7f-2140-adde-213a32457c11" [ 1734.743802] env[62684]: _type = "Task" [ 1734.743802] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.757991] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5267a1fd-3a7f-2140-adde-213a32457c11, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.778786] env[62684]: DEBUG nova.network.neutron [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Updated VIF entry in instance network info cache for port 45eb32aa-2917-4675-a758-bf202fb0fc08. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1734.779163] env[62684]: DEBUG nova.network.neutron [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Updating instance_info_cache with network_info: [{"id": "45eb32aa-2917-4675-a758-bf202fb0fc08", "address": "fa:16:3e:a5:47:91", "network": {"id": "c580420a-adb8-4862-ab3e-1f6f12d8b3c1", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1344327379-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4058aa2437d84bb49740f062876d66a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45eb32aa-29", "ovs_interfaceid": "45eb32aa-2917-4675-a758-bf202fb0fc08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1734.901188] env[62684]: DEBUG oslo_concurrency.lockutils [req-8b1f7560-046d-48b0-8eb7-b84f274d08a9 req-6a6c315e-bd37-4951-bd07-143e8066d0c3 service nova] Releasing lock "refresh_cache-0676806b-c1f0-4c1a-a12d-add2edf1588f" {{(pid=62684) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1734.913262] env[62684]: DEBUG oslo_vmware.api [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052354, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.981798] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9383818b-8464-439a-ba3c-c9f27aa26750 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.998463] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25642cd6-f983-4c83-b098-8eeae44f8e2e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.047019] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-605e8be2-4051-4b81-9ec6-12ceb08cf3fe {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.055788] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce09d47-6d05-46a2-9244-9663d3bb09c4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.074282] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1735.160143] env[62684]: DEBUG oslo_vmware.api [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Task: {'id': task-2052352, 'name': PowerOnVM_Task, 'duration_secs': 0.863468} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.160143] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1735.160143] env[62684]: INFO nova.compute.manager [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Took 10.02 seconds to spawn the instance on the hypervisor. 
[ 1735.160143] env[62684]: DEBUG nova.compute.manager [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1735.160143] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a4c8c1-cb5d-4fd0-881c-15e1d7e2d8fc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.201838] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] Acquiring lock "4cbcfa1a-c034-4de7-ad25-4ad22316067e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.202306] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] Lock "4cbcfa1a-c034-4de7-ad25-4ad22316067e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.202306] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] Acquiring lock "4cbcfa1a-c034-4de7-ad25-4ad22316067e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.202476] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] Lock "4cbcfa1a-c034-4de7-ad25-4ad22316067e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.203111] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] Lock "4cbcfa1a-c034-4de7-ad25-4ad22316067e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1735.212038] env[62684]: INFO nova.compute.manager [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Terminating instance [ 1735.214246] env[62684]: DEBUG nova.compute.manager [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1735.216146] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1735.216146] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa442ed-602b-4d20-85db-33b1ed17f55b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.234766] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1735.238394] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6a0d297a-f706-4937-9142-a27b17046234 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.240879] env[62684]: DEBUG oslo_vmware.api [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2052353, 'name': ReconfigVM_Task, 'duration_secs': 0.796243} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.241156] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Reconfigured VM instance instance-00000008 to attach disk [datastore1] effc673a-103f-413b-88ac-6907ad1ee852/effc673a-103f-413b-88ac-6907ad1ee852.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1735.242231] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-85b944dc-028c-4e9e-a771-898219773fbf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.247171] env[62684]: DEBUG oslo_vmware.api [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] Waiting for the task: (returnval){ [ 1735.247171] env[62684]: value = "task-2052355" [ 1735.247171] env[62684]: _type = "Task" [ 1735.247171] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.249500] env[62684]: DEBUG oslo_vmware.api [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Waiting for the task: (returnval){ [ 1735.249500] env[62684]: value = "task-2052356" [ 1735.249500] env[62684]: _type = "Task" [ 1735.249500] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.280724] env[62684]: DEBUG oslo_vmware.api [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] Task: {'id': task-2052355, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.288252] env[62684]: DEBUG oslo_concurrency.lockutils [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] Releasing lock "refresh_cache-91869c00-edd0-40a8-84df-d8842d750558" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1735.288715] env[62684]: DEBUG nova.compute.manager [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Received event network-vif-plugged-b9e25052-ec41-470d-b549-89e542cb4366 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1735.290025] env[62684]: DEBUG oslo_concurrency.lockutils [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] Acquiring lock "effc673a-103f-413b-88ac-6907ad1ee852-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.290025] env[62684]: DEBUG oslo_concurrency.lockutils [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] Lock "effc673a-103f-413b-88ac-6907ad1ee852-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.290025] env[62684]: DEBUG oslo_concurrency.lockutils [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] Lock "effc673a-103f-413b-88ac-6907ad1ee852-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1735.290025] env[62684]: DEBUG nova.compute.manager [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] [instance: effc673a-103f-413b-88ac-6907ad1ee852] No waiting events found dispatching network-vif-plugged-b9e25052-ec41-470d-b549-89e542cb4366 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1735.290025] env[62684]: WARNING nova.compute.manager [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Received unexpected event network-vif-plugged-b9e25052-ec41-470d-b549-89e542cb4366 for instance with vm_state building and task_state spawning. 
[ 1735.290293] env[62684]: DEBUG nova.compute.manager [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Received event network-changed-b9e25052-ec41-470d-b549-89e542cb4366 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1735.290293] env[62684]: DEBUG nova.compute.manager [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Refreshing instance network info cache due to event network-changed-b9e25052-ec41-470d-b549-89e542cb4366. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1735.290837] env[62684]: DEBUG oslo_concurrency.lockutils [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] Acquiring lock "refresh_cache-effc673a-103f-413b-88ac-6907ad1ee852" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1735.290837] env[62684]: DEBUG oslo_concurrency.lockutils [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] Acquired lock "refresh_cache-effc673a-103f-413b-88ac-6907ad1ee852" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1735.290837] env[62684]: DEBUG nova.network.neutron [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Refreshing network info cache for port b9e25052-ec41-470d-b549-89e542cb4366 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1735.292648] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5267a1fd-3a7f-2140-adde-213a32457c11, 'name': SearchDatastore_Task, 'duration_secs': 0.01611} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.293284] env[62684]: DEBUG oslo_vmware.api [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2052356, 'name': Rename_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.293729] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1735.293997] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] e1540aa6-12a4-4cff-a444-d47ee66c78d7/e1540aa6-12a4-4cff-a444-d47ee66c78d7.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1735.294372] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1735.294593] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1735.294851] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4134f424-8159-4dc5-9db0-fda43160d022 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.297634] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-35754290-f365-4882-bf5f-a05428caf770 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.301714] env[62684]: DEBUG oslo_concurrency.lockutils [None req-41a8da4b-9414-4d94-af54-a1f73dacdb67 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "8d53d8c3-6db8-4ebe-a35f-0f64602fafcb" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.301965] env[62684]: DEBUG oslo_concurrency.lockutils [None req-41a8da4b-9414-4d94-af54-a1f73dacdb67 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "8d53d8c3-6db8-4ebe-a35f-0f64602fafcb" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.302176] env[62684]: DEBUG nova.compute.manager [None req-41a8da4b-9414-4d94-af54-a1f73dacdb67 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Checking state 
{{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1735.305275] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7113db39-fe8f-4efe-9357-108f4b6620d6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.311558] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1735.311998] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1735.313667] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Waiting for the task: (returnval){ [ 1735.313667] env[62684]: value = "task-2052357" [ 1735.313667] env[62684]: _type = "Task" [ 1735.313667] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.314633] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff4eaae2-b5d2-4ab1-85ea-5e1cb2f207d9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.324247] env[62684]: DEBUG nova.compute.manager [None req-41a8da4b-9414-4d94-af54-a1f73dacdb67 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62684) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1735.324929] env[62684]: DEBUG nova.objects.instance [None req-41a8da4b-9414-4d94-af54-a1f73dacdb67 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lazy-loading 'flavor' on Instance uuid 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1735.333757] env[62684]: DEBUG oslo_vmware.api [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Waiting for the task: (returnval){ [ 1735.333757] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52441713-0c60-81c8-379d-bc2f0da1fdc9" [ 1735.333757] env[62684]: _type = "Task" [ 1735.333757] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.344449] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Task: {'id': task-2052357, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.368497] env[62684]: DEBUG oslo_vmware.api [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52441713-0c60-81c8-379d-bc2f0da1fdc9, 'name': SearchDatastore_Task, 'duration_secs': 0.012627} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.371122] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b41dec7-d4be-4e44-b2bb-d5c02b3d4e04 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.381334] env[62684]: DEBUG oslo_vmware.api [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Waiting for the task: (returnval){ [ 1735.381334] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524b86f8-338f-7508-8ebc-de3ecb674676" [ 1735.381334] env[62684]: _type = "Task" [ 1735.381334] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.391014] env[62684]: DEBUG oslo_vmware.api [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524b86f8-338f-7508-8ebc-de3ecb674676, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.400484] env[62684]: DEBUG oslo_vmware.api [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052354, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.579853] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1735.681846] env[62684]: INFO nova.compute.manager [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Took 26.16 seconds to build instance. [ 1735.769825] env[62684]: DEBUG oslo_vmware.api [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] Task: {'id': task-2052355, 'name': PowerOffVM_Task, 'duration_secs': 0.221334} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.778554] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1735.778554] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1735.781922] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b0693418-69c2-4a99-82c7-0ff9f300f391 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.797841] env[62684]: DEBUG oslo_vmware.api [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2052356, 'name': Rename_Task, 'duration_secs': 0.171398} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.798170] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1735.799179] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-290ca310-b6af-437f-a55e-21e0bf882fa5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.809900] env[62684]: DEBUG oslo_vmware.api [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Waiting for the task: (returnval){ [ 1735.809900] env[62684]: value = "task-2052359" [ 1735.809900] env[62684]: _type = "Task" [ 1735.809900] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.829020] env[62684]: DEBUG oslo_vmware.api [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2052359, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.835832] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Task: {'id': task-2052357, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.849606] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-41a8da4b-9414-4d94-af54-a1f73dacdb67 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1735.849978] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5ba684ac-9419-4072-bdbe-e082748c4c5c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.858814] env[62684]: DEBUG oslo_vmware.api [None req-41a8da4b-9414-4d94-af54-a1f73dacdb67 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1735.858814] env[62684]: value = "task-2052360" [ 1735.858814] env[62684]: _type = "Task" [ 1735.858814] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.868434] env[62684]: DEBUG oslo_vmware.api [None req-41a8da4b-9414-4d94-af54-a1f73dacdb67 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052360, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.890257] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1735.890257] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1735.890456] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] Deleting the datastore file [datastore2] 4cbcfa1a-c034-4de7-ad25-4ad22316067e {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1735.893191] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f0148300-c621-47db-86d1-3a2abfd5ef35 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.903573] env[62684]: DEBUG oslo_vmware.api [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524b86f8-338f-7508-8ebc-de3ecb674676, 'name': SearchDatastore_Task, 'duration_secs': 0.012411} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.904362] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1735.905836] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] fb7f38a0-bcfa-4d96-bde3-20d6f1d70112/fb7f38a0-bcfa-4d96-bde3-20d6f1d70112.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1735.911861] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-561a6d51-69ab-480a-9870-697216efcda3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.914714] env[62684]: DEBUG oslo_vmware.api [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052354, 'name': PowerOnVM_Task, 'duration_secs': 1.446061} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.916779] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1735.919038] env[62684]: INFO nova.compute.manager [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Took 13.13 seconds to spawn the instance on the hypervisor. [ 1735.919038] env[62684]: DEBUG nova.compute.manager [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1735.919038] env[62684]: DEBUG oslo_vmware.api [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] Waiting for the task: (returnval){ [ 1735.919038] env[62684]: value = "task-2052361" [ 1735.919038] env[62684]: _type = "Task" [ 1735.919038] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.920255] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b19e45e9-c242-4a89-ad81-d2143ba4d7b7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.929459] env[62684]: DEBUG oslo_vmware.api [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Waiting for the task: (returnval){ [ 1735.929459] env[62684]: value = "task-2052362" [ 1735.929459] env[62684]: _type = "Task" [ 1735.929459] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.944819] env[62684]: DEBUG oslo_vmware.api [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] Task: {'id': task-2052361, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.953565] env[62684]: DEBUG oslo_vmware.api [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052362, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.087122] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1736.087747] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.806s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1736.088181] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.140s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1736.091823] env[62684]: INFO nova.compute.claims [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1736.188243] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2603c055-1027-4f22-9738-2c9c296c176b tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Lock "91869c00-edd0-40a8-84df-d8842d750558" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.682s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1736.323592] 
env[62684]: DEBUG oslo_vmware.api [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2052359, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.348028] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Task: {'id': task-2052357, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.686164} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.348028] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] e1540aa6-12a4-4cff-a444-d47ee66c78d7/e1540aa6-12a4-4cff-a444-d47ee66c78d7.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1736.348028] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1736.348028] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9f60938c-051b-4683-8fe6-a249866b10ef {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.360097] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Waiting for the task: (returnval){ [ 1736.360097] env[62684]: value = "task-2052363" [ 1736.360097] env[62684]: _type = "Task" [ 1736.360097] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.385952] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Task: {'id': task-2052363, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.386331] env[62684]: DEBUG oslo_vmware.api [None req-41a8da4b-9414-4d94-af54-a1f73dacdb67 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052360, 'name': PowerOffVM_Task, 'duration_secs': 0.260164} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.386577] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-41a8da4b-9414-4d94-af54-a1f73dacdb67 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1736.386754] env[62684]: DEBUG nova.compute.manager [None req-41a8da4b-9414-4d94-af54-a1f73dacdb67 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1736.388973] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2ad6d8c-73c9-48ff-9147-085166d15d8a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.433010] env[62684]: DEBUG oslo_vmware.api [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] Task: {'id': task-2052361, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.282562} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.437615] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1736.437919] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1736.438184] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1736.438405] env[62684]: INFO nova.compute.manager [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1736.438858] env[62684]: DEBUG oslo.service.loopingcall [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1736.439201] env[62684]: DEBUG nova.compute.manager [-] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1736.439352] env[62684]: DEBUG nova.network.neutron [-] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1736.451637] env[62684]: DEBUG oslo_vmware.api [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052362, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.460447] env[62684]: INFO nova.compute.manager [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Took 27.02 seconds to build instance. [ 1736.694219] env[62684]: DEBUG nova.compute.manager [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1736.718330] env[62684]: DEBUG nova.compute.manager [req-7a605a77-1196-492f-8e3a-3966cea0fc88 req-b43bd766-ed64-4c74-b161-f520453d6da5 service nova] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Received event network-vif-deleted-0e21d0c2-c9f1-4885-ba7f-b8e64973c91f {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1736.731326] env[62684]: DEBUG nova.network.neutron [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Updated VIF entry in instance network info cache for port b9e25052-ec41-470d-b549-89e542cb4366. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1736.732588] env[62684]: DEBUG nova.network.neutron [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Updating instance_info_cache with network_info: [{"id": "b9e25052-ec41-470d-b549-89e542cb4366", "address": "fa:16:3e:55:a2:89", "network": {"id": "532579c0-d485-4585-bc63-1bbd3af0367a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1836758902-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e2e0ad7001b4b59805c1d6a3a0caf35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9e25052-ec", "ovs_interfaceid": "b9e25052-ec41-470d-b549-89e542cb4366", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1736.820953] env[62684]: DEBUG oslo_vmware.api [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2052359, 'name': PowerOnVM_Task, 'duration_secs': 0.772191} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.820953] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1736.821102] env[62684]: INFO nova.compute.manager [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Took 9.40 seconds to spawn the instance on the hypervisor. 
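The entries above and below all follow one recurring pattern: an oslo.vmware call returns a task handle (task-2052359, task-2052360, ...), and the compute driver polls that task, logging "progress is N%" until it reports "completed successfully" with a duration_secs. The sketch below illustrates that poll-and-wait loop in isolation; TaskInfo, get_task_info() and the simulated progress backend are illustrative assumptions for the sake of a runnable example, not the oslo.vmware implementation.

    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:                  # stand-in for the vCenter TaskInfo object (assumed shape)
        state: str                   # 'running', 'success' or 'error'
        progress: int                # 0-100, as in the "progress is N%" entries above
        error: str | None = None

    _fake_progress = {}              # simulated backend so the sketch runs on its own

    def get_task_info(task_ref):
        """Hypothetical lookup; the real driver reads TaskInfo from vCenter instead."""
        pct = _fake_progress.get(task_ref, 0) + 25
        _fake_progress[task_ref] = pct
        return TaskInfo(state='success' if pct >= 100 else 'running', progress=min(pct, 100))

    def wait_for_task(task_ref, poll_interval=0.5):
        """Poll a task until it finishes, mirroring the progress/completed lines in the log."""
        start = time.monotonic()
        while True:
            info = get_task_info(task_ref)
            if info.state == 'error':
                raise RuntimeError(f"Task {task_ref} failed: {info.error}")
            if info.state == 'success':
                print(f"Task {task_ref} completed successfully, "
                      f"duration_secs={time.monotonic() - start:.3f}")
                return info
            print(f"Task {task_ref} progress is {info.progress}%")
            time.sleep(poll_interval)

    wait_for_task("task-2052359")    # prints progress lines, then a completed line
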
[ 1736.821303] env[62684]: DEBUG nova.compute.manager [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1736.822688] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a246f122-cfff-4211-b956-f04dd9fb27c9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.883298] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Task: {'id': task-2052363, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.111959} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.883965] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1736.886096] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7606e7dc-5d77-4afc-86ed-08a148b3501e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.906870] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] e1540aa6-12a4-4cff-a444-d47ee66c78d7/e1540aa6-12a4-4cff-a444-d47ee66c78d7.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1736.910047] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ca102cd-5e99-47df-920c-fd72b07fbfd4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.926928] env[62684]: DEBUG oslo_concurrency.lockutils [None req-41a8da4b-9414-4d94-af54-a1f73dacdb67 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "8d53d8c3-6db8-4ebe-a35f-0f64602fafcb" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.624s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1736.935443] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Waiting for the task: (returnval){ [ 1736.935443] env[62684]: value = "task-2052364" [ 1736.935443] env[62684]: _type = "Task" [ 1736.935443] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.956147] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Task: {'id': task-2052364, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.956147] env[62684]: DEBUG oslo_vmware.api [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052362, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.960527] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8b512e01-c137-400b-827b-a87a3eaa8394 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "0676806b-c1f0-4c1a-a12d-add2edf1588f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.538s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1737.090588] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1737.092576] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1737.092993] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1737.094248] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1737.094513] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1737.094732] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1737.094938] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1737.220811] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.239380] env[62684]: DEBUG oslo_concurrency.lockutils [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] Releasing lock "refresh_cache-effc673a-103f-413b-88ac-6907ad1ee852" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1737.239954] env[62684]: DEBUG nova.compute.manager [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Received event network-vif-plugged-eab61e5f-3e13-43bd-8778-1f31e15ef593 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1737.239954] env[62684]: DEBUG oslo_concurrency.lockutils [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] Acquiring lock "fb7f38a0-bcfa-4d96-bde3-20d6f1d70112-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.240388] env[62684]: DEBUG oslo_concurrency.lockutils [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] Lock "fb7f38a0-bcfa-4d96-bde3-20d6f1d70112-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1737.240388] env[62684]: DEBUG oslo_concurrency.lockutils [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] Lock "fb7f38a0-bcfa-4d96-bde3-20d6f1d70112-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1737.240932] env[62684]: DEBUG nova.compute.manager [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] No waiting events found dispatching network-vif-plugged-eab61e5f-3e13-43bd-8778-1f31e15ef593 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1737.240932] env[62684]: WARNING nova.compute.manager [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Received unexpected event network-vif-plugged-eab61e5f-3e13-43bd-8778-1f31e15ef593 for instance with vm_state building and task_state spawning. 
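The lockutils entries interleaved with the task polling show the other recurring pattern: a named lock (for example "compute_resources" or an instance's "-events" lock) is acquired with a "waited N s" line and released with a "held N s" line. The sketch below reproduces that instrumentation with a plain threading.Lock; it is a simplified stand-in with assumed helper names, not oslo.concurrency's actual lockutils code.

    import threading
    import time
    from contextlib import contextmanager

    _locks: dict[str, threading.Lock] = {}   # one named lock per resource, e.g. "compute_resources"

    @contextmanager
    def named_lock(name, caller):
        """Simplified stand-in for the acquire/release timing lines in these entries."""
        lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        print(f'Lock "{name}" acquired by "{caller}" :: waited {time.monotonic() - t0:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            print(f'Lock "{name}" "released" by "{caller}" :: held {time.monotonic() - t1:.3f}s')

    # Rough shape of the claim path seen above (names taken from the log, body elided):
    with named_lock("compute_resources",
                    "nova.compute.resource_tracker.ResourceTracker.instance_claim"):
        time.sleep(0.1)              # placeholder for the actual resource claim work
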
[ 1737.241136] env[62684]: DEBUG nova.compute.manager [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Received event network-changed-eab61e5f-3e13-43bd-8778-1f31e15ef593 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1737.241274] env[62684]: DEBUG nova.compute.manager [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Refreshing instance network info cache due to event network-changed-eab61e5f-3e13-43bd-8778-1f31e15ef593. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1737.241806] env[62684]: DEBUG oslo_concurrency.lockutils [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] Acquiring lock "refresh_cache-fb7f38a0-bcfa-4d96-bde3-20d6f1d70112" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1737.241806] env[62684]: DEBUG oslo_concurrency.lockutils [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] Acquired lock "refresh_cache-fb7f38a0-bcfa-4d96-bde3-20d6f1d70112" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1737.241910] env[62684]: DEBUG nova.network.neutron [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Refreshing network info cache for port eab61e5f-3e13-43bd-8778-1f31e15ef593 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1737.351263] env[62684]: INFO nova.compute.manager [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Took 25.30 seconds to build instance. [ 1737.453738] env[62684]: DEBUG oslo_vmware.api [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052362, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.462057] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Task: {'id': task-2052364, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.463750] env[62684]: DEBUG nova.compute.manager [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1737.485886] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa9d9195-b032-4353-b92c-ed480b8134d9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.498137] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77f58b2e-8b37-461f-a505-3b500d723bc1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.535860] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eeb65a8-bdef-45e0-98c3-e97710acd799 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.547694] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97aa9bda-1d7f-4fbe-be78-607b0bec220c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.570238] env[62684]: DEBUG nova.compute.provider_tree [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1737.855356] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c46dc9df-03a8-45fb-ad42-e859278ed1bd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Lock "effc673a-103f-413b-88ac-6907ad1ee852" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.821s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1737.959468] env[62684]: DEBUG oslo_vmware.api [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052362, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.640174} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.963747] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] fb7f38a0-bcfa-4d96-bde3-20d6f1d70112/fb7f38a0-bcfa-4d96-bde3-20d6f1d70112.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1737.963906] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1737.965472] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Task: {'id': task-2052364, 'name': ReconfigVM_Task, 'duration_secs': 0.54703} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.965472] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b9e3c8a1-b845-4398-8fc4-813d1e522194 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.968216] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Reconfigured VM instance instance-0000000a to attach disk [datastore1] e1540aa6-12a4-4cff-a444-d47ee66c78d7/e1540aa6-12a4-4cff-a444-d47ee66c78d7.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1737.969717] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ceeb4b32-0666-4dd5-a8e5-35b380ca41fe {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.981765] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Waiting for the task: (returnval){ [ 1737.981765] env[62684]: value = "task-2052366" [ 1737.981765] env[62684]: _type = "Task" [ 1737.981765] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.982932] env[62684]: DEBUG oslo_vmware.api [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Waiting for the task: (returnval){ [ 1737.982932] env[62684]: value = "task-2052365" [ 1737.982932] env[62684]: _type = "Task" [ 1737.982932] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.010025] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Task: {'id': task-2052366, 'name': Rename_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.010025] env[62684]: DEBUG oslo_vmware.api [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052365, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.012804] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.045060] env[62684]: DEBUG nova.network.neutron [-] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1738.073491] env[62684]: DEBUG nova.scheduler.client.report [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1738.187879] env[62684]: DEBUG nova.network.neutron [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Updated VIF entry in instance network info cache for port eab61e5f-3e13-43bd-8778-1f31e15ef593. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1738.188305] env[62684]: DEBUG nova.network.neutron [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Updating instance_info_cache with network_info: [{"id": "eab61e5f-3e13-43bd-8778-1f31e15ef593", "address": "fa:16:3e:a5:6a:61", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.238", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeab61e5f-3e", "ovs_interfaceid": "eab61e5f-3e13-43bd-8778-1f31e15ef593", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1738.364641] env[62684]: DEBUG nova.compute.manager [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1738.513024] env[62684]: DEBUG oslo_vmware.api [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052365, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094748} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.513024] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Task: {'id': task-2052366, 'name': Rename_Task, 'duration_secs': 0.19888} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.513024] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1738.513024] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1738.513024] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a8caa6b-528b-45a8-a2d9-b26c1c866d2a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.521449] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-47a493f3-5d7f-4ad3-9c3a-9774a8979a73 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.548314] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] fb7f38a0-bcfa-4d96-bde3-20d6f1d70112/fb7f38a0-bcfa-4d96-bde3-20d6f1d70112.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1738.556025] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f9603f3-1ecf-49e6-8c9a-535485180308 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.583471] env[62684]: INFO nova.compute.manager [-] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Took 2.14 seconds to deallocate network for instance. [ 1738.586936] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Waiting for the task: (returnval){ [ 1738.586936] env[62684]: value = "task-2052367" [ 1738.586936] env[62684]: _type = "Task" [ 1738.586936] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.587974] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.500s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1738.588480] env[62684]: DEBUG nova.compute.manager [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1738.596057] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Acquiring lock "0dbd52ac-c987-4728-974e-73e99465c5e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.596371] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Lock "0dbd52ac-c987-4728-974e-73e99465c5e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1738.596738] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.622s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1738.598265] env[62684]: INFO nova.compute.claims [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1738.613106] env[62684]: DEBUG oslo_vmware.api [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Waiting for the task: (returnval){ [ 1738.613106] env[62684]: value = "task-2052368" [ 1738.613106] env[62684]: _type = "Task" [ 1738.613106] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.621492] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Task: {'id': task-2052367, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.627558] env[62684]: DEBUG oslo_vmware.api [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052368, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.696730] env[62684]: DEBUG oslo_concurrency.lockutils [req-d9531398-c0c1-4417-b483-31f0414b5b8c req-c7925925-1972-4d03-b53d-4427a99da7c5 service nova] Releasing lock "refresh_cache-fb7f38a0-bcfa-4d96-bde3-20d6f1d70112" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1738.905684] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1739.114665] env[62684]: DEBUG nova.compute.utils [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1739.116180] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Task: {'id': task-2052367, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.120114] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1739.120114] env[62684]: DEBUG nova.compute.manager [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1739.120114] env[62684]: DEBUG nova.network.neutron [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1739.129784] env[62684]: DEBUG oslo_vmware.api [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052368, 'name': ReconfigVM_Task, 'duration_secs': 0.506126} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.130840] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Reconfigured VM instance instance-00000009 to attach disk [datastore1] fb7f38a0-bcfa-4d96-bde3-20d6f1d70112/fb7f38a0-bcfa-4d96-bde3-20d6f1d70112.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1739.131504] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c8f876a9-2e57-4a0c-9a3d-f39d804de571 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.139403] env[62684]: DEBUG oslo_vmware.api [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Waiting for the task: (returnval){ [ 1739.139403] env[62684]: value = "task-2052369" [ 1739.139403] env[62684]: _type = "Task" [ 1739.139403] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.153374] env[62684]: DEBUG oslo_vmware.api [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052369, 'name': Rename_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.288988] env[62684]: DEBUG nova.policy [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8710ca0285de44d49dca43dedc76b623', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '949b8296172b4f4aab8bd28c8f4a03d7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1739.371795] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Acquiring lock "c1580c72-9345-436e-b4f7-56d319248864" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1739.372420] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Lock "c1580c72-9345-436e-b4f7-56d319248864" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1739.372810] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe 
tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Acquiring lock "c1580c72-9345-436e-b4f7-56d319248864-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1739.373082] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Lock "c1580c72-9345-436e-b4f7-56d319248864-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1739.373342] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Lock "c1580c72-9345-436e-b4f7-56d319248864-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1739.376866] env[62684]: INFO nova.compute.manager [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Terminating instance [ 1739.378965] env[62684]: DEBUG nova.compute.manager [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1739.378965] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1739.379899] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07bae43f-6829-4657-a709-c44f6dcc2638 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.388897] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1739.389220] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6b1e8008-7651-476a-87f5-90bb6c1e375e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.403553] env[62684]: DEBUG oslo_vmware.api [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Waiting for the task: (returnval){ [ 1739.403553] env[62684]: value = "task-2052370" [ 1739.403553] env[62684]: _type = "Task" [ 1739.403553] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.417572] env[62684]: DEBUG oslo_vmware.api [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052370, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.611916] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Task: {'id': task-2052367, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.624873] env[62684]: DEBUG nova.compute.manager [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1739.651071] env[62684]: DEBUG oslo_vmware.api [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052369, 'name': Rename_Task, 'duration_secs': 0.195846} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.653663] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1739.653999] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-114da8c1-b9ba-4929-afba-6d659a6902d8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.662194] env[62684]: DEBUG oslo_vmware.api [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Waiting for the task: (returnval){ [ 1739.662194] env[62684]: value = "task-2052371" [ 1739.662194] env[62684]: _type = "Task" [ 1739.662194] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.682211] env[62684]: DEBUG oslo_vmware.api [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052371, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.924080] env[62684]: DEBUG oslo_vmware.api [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052370, 'name': PowerOffVM_Task, 'duration_secs': 0.276176} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.926307] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1739.926662] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1739.926768] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-02fd2dc6-4e34-4fe3-9111-fe12373bab27 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.021818] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1740.022115] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1740.022320] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Deleting the datastore file [datastore2] c1580c72-9345-436e-b4f7-56d319248864 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1740.022639] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-15f84eae-61c4-4407-af2c-a48e72c6ece3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.037157] env[62684]: DEBUG oslo_vmware.api [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Waiting for the task: (returnval){ [ 1740.037157] env[62684]: value = "task-2052373" [ 1740.037157] env[62684]: _type = "Task" [ 1740.037157] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.050149] env[62684]: DEBUG oslo_vmware.api [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052373, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.101212] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f4b70a7-ebd9-472f-b4fa-eaaf54523f27 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.116527] env[62684]: DEBUG nova.compute.manager [req-0824a4b9-9afc-4e48-9314-3dd9940175ab req-8bd93d46-5247-411d-af10-e09a7696348f service nova] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Received event network-vif-deleted-53627d03-ce5e-439d-9055-b5abf4b099ed {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1740.129890] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0400d9f-59a4-4e50-b468-ee9a27045171 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.141358] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Task: {'id': task-2052367, 'name': PowerOnVM_Task} progress is 91%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.181750] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-726766aa-7b96-4bf9-ba2e-a9e1ad66a9ba {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.191511] env[62684]: DEBUG oslo_vmware.api [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052371, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.195550] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ebc444-09b1-467a-bfb3-b864e73072d4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.212997] env[62684]: DEBUG nova.compute.provider_tree [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1740.360317] env[62684]: DEBUG nova.network.neutron [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Successfully created port: f8d1bd12-b449-41ef-bd95-755f619b639a {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1740.522379] env[62684]: DEBUG oslo_concurrency.lockutils [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Acquiring lock "a4767855-0c1d-48c8-98cc-6532ff140b5c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1740.522648] env[62684]: DEBUG oslo_concurrency.lockutils [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Lock "a4767855-0c1d-48c8-98cc-6532ff140b5c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1740.547536] env[62684]: DEBUG oslo_vmware.api [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Task: {'id': task-2052373, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.357355} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.547794] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1740.548069] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1740.548243] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1740.548423] env[62684]: INFO nova.compute.manager [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] [instance: c1580c72-9345-436e-b4f7-56d319248864] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1740.548662] env[62684]: DEBUG oslo.service.loopingcall [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1740.548859] env[62684]: DEBUG nova.compute.manager [-] [instance: c1580c72-9345-436e-b4f7-56d319248864] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1740.548965] env[62684]: DEBUG nova.network.neutron [-] [instance: c1580c72-9345-436e-b4f7-56d319248864] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1740.580323] env[62684]: DEBUG nova.compute.manager [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Stashing vm_state: active {{(pid=62684) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1740.624619] env[62684]: DEBUG oslo_vmware.api [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Task: {'id': task-2052367, 'name': PowerOnVM_Task, 'duration_secs': 1.645823} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.625029] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1740.625378] env[62684]: INFO nova.compute.manager [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Took 8.29 seconds to spawn the instance on the hypervisor. [ 1740.625620] env[62684]: DEBUG nova.compute.manager [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1740.626926] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef24587a-4155-4ada-bd3d-0b354cd99eb9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.644166] env[62684]: DEBUG nova.compute.manager [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1740.688659] env[62684]: DEBUG oslo_vmware.api [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052371, 'name': PowerOnVM_Task, 'duration_secs': 0.69236} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.688935] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1740.689222] env[62684]: INFO nova.compute.manager [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Took 10.83 seconds to spawn the instance on the hypervisor. 
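The task records above (Rename_Task, PowerOnVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task and their "progress is N%" lines) all follow the same submit-then-poll shape: an API call returns a task handle, and the caller polls its state on an interval until vCenter reports success or an error. The sketch below is a minimal, hypothetical illustration of that pattern only; wait_for_task and fake_get_task_info are stand-in names invented here and do not reproduce oslo.vmware's actual implementation or API.

import itertools
import time


def fake_get_task_info(_progress=itertools.count(0, 33)):
    # Stand-in for a real task-status lookup. The default-argument counter is
    # created once and shared across calls, so each call reports a bit more
    # progress until the task finishes (0 -> 33 -> 66 -> 99 -> 100).
    pct = min(next(_progress), 100)
    return {"state": "success" if pct >= 100 else "running", "progress": pct}


def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    # Poll until the task reports success; raise on error or timeout.
    deadline = time.monotonic() + timeout
    while True:
        info = get_task_info()
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError("task failed: %r" % (info,))
        if time.monotonic() >= deadline:
            raise TimeoutError("task did not complete within %.0f s" % timeout)
        # Mirrors the "Task: {...} progress is N%" lines in the log.
        print("task progress is %d%%" % info["progress"])
        time.sleep(poll_interval)


if __name__ == "__main__":
    # With the fake lookup this completes after a few polls and returns
    # {'state': 'success', 'progress': 100}.
    print(wait_for_task(fake_get_task_info, poll_interval=0.1))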
[ 1740.689427] env[62684]: DEBUG nova.compute.manager [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1740.690387] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bbb6bc7-9829-4fe3-9d50-210217ae48e2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.716634] env[62684]: DEBUG nova.scheduler.client.report [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1740.747489] env[62684]: DEBUG nova.virt.hardware [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1740.747874] env[62684]: DEBUG nova.virt.hardware [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1740.748086] env[62684]: DEBUG nova.virt.hardware [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1740.748308] env[62684]: DEBUG nova.virt.hardware [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1740.748438] env[62684]: DEBUG nova.virt.hardware [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 
tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1740.748691] env[62684]: DEBUG nova.virt.hardware [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1740.748817] env[62684]: DEBUG nova.virt.hardware [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1740.748982] env[62684]: DEBUG nova.virt.hardware [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1740.749171] env[62684]: DEBUG nova.virt.hardware [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1740.749355] env[62684]: DEBUG nova.virt.hardware [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1740.749517] env[62684]: DEBUG nova.virt.hardware [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1740.750831] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18eb6eb7-6041-4690-83c5-524a8b7a0fb9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.759559] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb373f82-66ab-4717-9294-d26135af968d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.106842] env[62684]: DEBUG oslo_concurrency.lockutils [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.148919] env[62684]: INFO nova.compute.manager [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 
tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Took 19.72 seconds to build instance. [ 1741.208843] env[62684]: INFO nova.compute.manager [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Took 23.51 seconds to build instance. [ 1741.221980] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.625s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.222571] env[62684]: DEBUG nova.compute.manager [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1741.229142] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.667s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.230774] env[62684]: INFO nova.compute.claims [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1741.504370] env[62684]: DEBUG oslo_concurrency.lockutils [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Acquiring lock "91869c00-edd0-40a8-84df-d8842d750558" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.506278] env[62684]: DEBUG oslo_concurrency.lockutils [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Lock "91869c00-edd0-40a8-84df-d8842d750558" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.507908] env[62684]: DEBUG oslo_concurrency.lockutils [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Acquiring lock "91869c00-edd0-40a8-84df-d8842d750558-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.507908] env[62684]: DEBUG oslo_concurrency.lockutils [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 
tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Lock "91869c00-edd0-40a8-84df-d8842d750558-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.507908] env[62684]: DEBUG oslo_concurrency.lockutils [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Lock "91869c00-edd0-40a8-84df-d8842d750558-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.511395] env[62684]: INFO nova.compute.manager [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Terminating instance [ 1741.514607] env[62684]: DEBUG nova.compute.manager [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1741.516194] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1741.517519] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb64a31-3b59-4279-8132-918925dc81f6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.528364] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1741.528638] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c6f644c4-a110-46cf-b720-7f55bdc3e324 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.538390] env[62684]: DEBUG oslo_vmware.api [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Waiting for the task: (returnval){ [ 1741.538390] env[62684]: value = "task-2052374" [ 1741.538390] env[62684]: _type = "Task" [ 1741.538390] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.555509] env[62684]: DEBUG oslo_vmware.api [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Task: {'id': task-2052374, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.658057] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eb49f483-0aa4-4ab1-8ea7-984bc3d3ae85 tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Lock "e1540aa6-12a4-4cff-a444-d47ee66c78d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.241s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.711894] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2b5318e3-8d7f-4e93-8765-097f06b174a2 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Lock "fb7f38a0-bcfa-4d96-bde3-20d6f1d70112" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.031s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.740103] env[62684]: DEBUG nova.compute.utils [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1741.743879] env[62684]: DEBUG nova.compute.manager [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1741.743879] env[62684]: DEBUG nova.network.neutron [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1741.791689] env[62684]: DEBUG nova.compute.manager [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1741.791689] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1deb913e-6aef-451a-8f8a-c72ff0e00e68 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.020479] env[62684]: DEBUG nova.policy [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3796eb780d684cdcad4acc92ae15fa6e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e169ffb4120143dca6d67108986e62f6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1742.049957] env[62684]: 
DEBUG oslo_vmware.api [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Task: {'id': task-2052374, 'name': PowerOffVM_Task, 'duration_secs': 0.274581} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.050255] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1742.050486] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1742.051153] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b0cd162c-3913-4feb-a3e5-87ef52feb986 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.128825] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1742.128825] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1742.128825] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Deleting the datastore file [datastore2] 91869c00-edd0-40a8-84df-d8842d750558 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1742.128825] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5058dddd-58c4-4c5e-901d-923fb886f1b5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.136295] env[62684]: DEBUG oslo_vmware.api [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Waiting for the task: (returnval){ [ 1742.136295] env[62684]: value = "task-2052376" [ 1742.136295] env[62684]: _type = "Task" [ 1742.136295] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.149258] env[62684]: DEBUG oslo_vmware.api [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Task: {'id': task-2052376, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.162769] env[62684]: DEBUG nova.compute.manager [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1742.214948] env[62684]: DEBUG nova.compute.manager [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1742.254026] env[62684]: DEBUG nova.compute.manager [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1742.307026] env[62684]: INFO nova.compute.manager [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] instance snapshotting [ 1742.307026] env[62684]: WARNING nova.compute.manager [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 1742.314297] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc367f04-e2d3-4e1b-bc34-30b1e56c28cd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.343289] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53f455fe-2a46-439c-ba34-bc14f4467589 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.618408] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03117392-bc0c-4987-ba2a-a1bcfa38bd66 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.627049] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51874f6f-9eeb-453b-92d3-fc76999d0f80 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.668382] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5200c71-db60-40f8-8d7e-4d96c72689f0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.683146] env[62684]: DEBUG oslo_vmware.api [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Task: {'id': task-2052376, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.401113} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.683146] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1742.683146] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1742.683704] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1742.684755] env[62684]: INFO nova.compute.manager [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1742.684755] env[62684]: DEBUG oslo.service.loopingcall [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1742.685416] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82e56037-8f88-4781-ac5d-a3434f94f9aa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.689710] env[62684]: DEBUG nova.compute.manager [-] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1742.689822] env[62684]: DEBUG nova.network.neutron [-] [instance: 91869c00-edd0-40a8-84df-d8842d750558] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1742.704144] env[62684]: DEBUG nova.compute.provider_tree [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1742.706457] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.734862] env[62684]: DEBUG oslo_concurrency.lockutils [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.762015] env[62684]: DEBUG nova.network.neutron [-] [instance: c1580c72-9345-436e-b4f7-56d319248864] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1742.855480] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Creating Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1742.855941] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-4ddfdd98-08b6-4b01-8613-ac27cd2fab96 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.870952] env[62684]: DEBUG oslo_vmware.api [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1742.870952] env[62684]: value = "task-2052377" [ 1742.870952] env[62684]: _type = "Task" [ 1742.870952] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.888763] env[62684]: DEBUG oslo_vmware.api [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052377, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.049138] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Acquiring lock "17d30180-9770-4329-a6d8-757a93514a96" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1743.049718] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Lock "17d30180-9770-4329-a6d8-757a93514a96" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1743.165790] env[62684]: DEBUG nova.network.neutron [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Successfully updated port: f8d1bd12-b449-41ef-bd95-755f619b639a {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1743.209673] env[62684]: DEBUG nova.scheduler.client.report [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1743.271021] env[62684]: INFO nova.compute.manager [-] [instance: c1580c72-9345-436e-b4f7-56d319248864] Took 2.72 seconds to deallocate network for instance. [ 1743.271021] env[62684]: DEBUG nova.compute.manager [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1743.301209] env[62684]: DEBUG nova.virt.hardware [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1743.301484] env[62684]: DEBUG nova.virt.hardware [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1743.301640] env[62684]: DEBUG nova.virt.hardware [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1743.301822] env[62684]: DEBUG nova.virt.hardware [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1743.303016] env[62684]: DEBUG nova.virt.hardware [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1743.303016] env[62684]: DEBUG nova.virt.hardware [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1743.303016] env[62684]: DEBUG nova.virt.hardware [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1743.303016] env[62684]: DEBUG nova.virt.hardware [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1743.303016] env[62684]: DEBUG nova.virt.hardware [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1743.303270] env[62684]: DEBUG nova.virt.hardware [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1743.303270] env[62684]: DEBUG nova.virt.hardware [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1743.304186] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e410f22-a666-4ea0-ba3b-ef40d9a70318 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.313152] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf557cba-4b12-4e76-a862-bda58ac0e6ba {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.329826] env[62684]: DEBUG nova.network.neutron [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Successfully created port: 8f6b3e69-1998-4808-9c1a-1224c8ab5363 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1743.382981] env[62684]: DEBUG oslo_vmware.api [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052377, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.638976] env[62684]: DEBUG nova.compute.manager [req-04bbfe41-b63b-4483-89eb-b81d418d4a2c req-1211adc6-e361-404d-b4d3-6cd3c916210a service nova] [instance: c1580c72-9345-436e-b4f7-56d319248864] Received event network-vif-deleted-c089de91-ca6e-40fe-8783-5b1644292445 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1743.672228] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Acquiring lock "refresh_cache-d532b5fa-90a3-4f25-8684-4eabaa432c86" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1743.672282] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Acquired lock "refresh_cache-d532b5fa-90a3-4f25-8684-4eabaa432c86" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1743.672417] env[62684]: DEBUG nova.network.neutron [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1743.713517] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.488s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1743.713849] env[62684]: DEBUG nova.compute.manager [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1743.716746] env[62684]: DEBUG oslo_concurrency.lockutils [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.582s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1743.718342] env[62684]: INFO nova.compute.claims [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1743.779646] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1743.859444] env[62684]: DEBUG nova.compute.manager [req-cf545891-c5aa-4b87-83c8-ad01be90561a req-5dd8c7ec-c54f-4597-8f47-2382ab0ee4da service nova] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Received event network-vif-plugged-f8d1bd12-b449-41ef-bd95-755f619b639a {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1743.860370] env[62684]: DEBUG oslo_concurrency.lockutils [req-cf545891-c5aa-4b87-83c8-ad01be90561a req-5dd8c7ec-c54f-4597-8f47-2382ab0ee4da service nova] Acquiring lock "d532b5fa-90a3-4f25-8684-4eabaa432c86-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1743.860370] env[62684]: DEBUG oslo_concurrency.lockutils [req-cf545891-c5aa-4b87-83c8-ad01be90561a req-5dd8c7ec-c54f-4597-8f47-2382ab0ee4da service nova] Lock "d532b5fa-90a3-4f25-8684-4eabaa432c86-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1743.860717] env[62684]: DEBUG oslo_concurrency.lockutils [req-cf545891-c5aa-4b87-83c8-ad01be90561a req-5dd8c7ec-c54f-4597-8f47-2382ab0ee4da service nova] Lock "d532b5fa-90a3-4f25-8684-4eabaa432c86-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1743.860905] env[62684]: DEBUG nova.compute.manager [req-cf545891-c5aa-4b87-83c8-ad01be90561a req-5dd8c7ec-c54f-4597-8f47-2382ab0ee4da service nova] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] No waiting events found dispatching network-vif-plugged-f8d1bd12-b449-41ef-bd95-755f619b639a {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1743.862491] env[62684]: WARNING nova.compute.manager [req-cf545891-c5aa-4b87-83c8-ad01be90561a req-5dd8c7ec-c54f-4597-8f47-2382ab0ee4da service nova] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Received unexpected event network-vif-plugged-f8d1bd12-b449-41ef-bd95-755f619b639a for instance with vm_state building 
and task_state spawning. [ 1743.885424] env[62684]: DEBUG oslo_vmware.api [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052377, 'name': CreateSnapshot_Task, 'duration_secs': 0.727778} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.885424] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Created Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1743.886083] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2b243b8-ebaf-45b3-bcb2-0a880189ff8e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.089383] env[62684]: DEBUG nova.compute.manager [None req-bb78d3cc-07ea-4447-b406-d07ce99ae92c tempest-ServerDiagnosticsV248Test-289604052 tempest-ServerDiagnosticsV248Test-289604052-project-admin] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1744.091522] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be762620-b9d0-4aad-968a-3b872f542ed5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.100537] env[62684]: INFO nova.compute.manager [None req-bb78d3cc-07ea-4447-b406-d07ce99ae92c tempest-ServerDiagnosticsV248Test-289604052 tempest-ServerDiagnosticsV248Test-289604052-project-admin] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Retrieving diagnostics [ 1744.102036] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-409e5b46-3fe6-4897-b715-d2891792f934 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.167336] env[62684]: DEBUG nova.network.neutron [-] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1744.227827] env[62684]: DEBUG nova.compute.utils [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1744.232016] env[62684]: DEBUG nova.compute.manager [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1744.232198] env[62684]: DEBUG nova.network.neutron [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1744.248767] env[62684]: DEBUG nova.network.neutron [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1744.410545] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Creating linked-clone VM from snapshot {{(pid=62684) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1744.410805] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-80423549-9fc0-4b4f-af66-e8f5f1f43fb3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.420404] env[62684]: DEBUG oslo_vmware.api [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1744.420404] env[62684]: value = "task-2052378" [ 1744.420404] env[62684]: _type = "Task" [ 1744.420404] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.432085] env[62684]: DEBUG oslo_vmware.api [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052378, 'name': CloneVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.490488] env[62684]: DEBUG oslo_vmware.rw_handles [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e17ca3-2e62-e1a6-70cc-ea4180581025/disk-0.vmdk. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1744.492465] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e275d93-f46a-47f5-9abc-fc49ff27641f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.501078] env[62684]: DEBUG oslo_vmware.rw_handles [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e17ca3-2e62-e1a6-70cc-ea4180581025/disk-0.vmdk is in state: ready. 
{{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1744.501578] env[62684]: ERROR oslo_vmware.rw_handles [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e17ca3-2e62-e1a6-70cc-ea4180581025/disk-0.vmdk due to incomplete transfer. [ 1744.502114] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-5146bfb9-5950-4de6-978e-1375ef7d5528 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.512353] env[62684]: DEBUG oslo_vmware.rw_handles [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e17ca3-2e62-e1a6-70cc-ea4180581025/disk-0.vmdk. {{(pid=62684) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1744.512585] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Uploaded image a24e3d0e-c7d5-4604-9590-4de2389d27ca to the Glance image server {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1744.514642] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Destroying the VM {{(pid=62684) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1744.515021] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b1d9b395-2e2c-4f17-881e-f6f4d980883a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.522461] env[62684]: DEBUG oslo_vmware.api [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Waiting for the task: (returnval){ [ 1744.522461] env[62684]: value = "task-2052379" [ 1744.522461] env[62684]: _type = "Task" [ 1744.522461] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.536635] env[62684]: DEBUG oslo_vmware.api [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052379, 'name': Destroy_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.538947] env[62684]: DEBUG nova.policy [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f0ba461621d47eda3b857fcfd7849f2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ca5fca9cf7854c218807cb17d86aff8e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1744.637400] env[62684]: DEBUG nova.network.neutron [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Updating instance_info_cache with network_info: [{"id": "f8d1bd12-b449-41ef-bd95-755f619b639a", "address": "fa:16:3e:56:d6:c1", "network": {"id": "27f7ef6a-6ec1-4b90-962b-11f871569e37", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-434465261-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "949b8296172b4f4aab8bd28c8f4a03d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c", "external-id": "nsx-vlan-transportzone-977", "segmentation_id": 977, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8d1bd12-b4", "ovs_interfaceid": "f8d1bd12-b449-41ef-bd95-755f619b639a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1744.670015] env[62684]: INFO nova.compute.manager [-] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Took 1.98 seconds to deallocate network for instance. [ 1744.732813] env[62684]: DEBUG nova.compute.manager [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1744.936015] env[62684]: DEBUG oslo_vmware.api [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052378, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.009927] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Acquiring lock "ab2c7cbe-6f46-4174-bffb-055a15f2d56b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1745.010253] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Lock "ab2c7cbe-6f46-4174-bffb-055a15f2d56b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1745.039241] env[62684]: DEBUG oslo_vmware.api [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052379, 'name': Destroy_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.140856] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Releasing lock "refresh_cache-d532b5fa-90a3-4f25-8684-4eabaa432c86" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1745.143163] env[62684]: DEBUG nova.compute.manager [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Instance network_info: |[{"id": "f8d1bd12-b449-41ef-bd95-755f619b639a", "address": "fa:16:3e:56:d6:c1", "network": {"id": "27f7ef6a-6ec1-4b90-962b-11f871569e37", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-434465261-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "949b8296172b4f4aab8bd28c8f4a03d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c", "external-id": "nsx-vlan-transportzone-977", "segmentation_id": 977, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8d1bd12-b4", "ovs_interfaceid": "f8d1bd12-b449-41ef-bd95-755f619b639a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1745.143693] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 
tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:d6:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f8d1bd12-b449-41ef-bd95-755f619b639a', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1745.156160] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Creating folder: Project (949b8296172b4f4aab8bd28c8f4a03d7). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1745.164018] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7bf9be7c-6c97-4840-ae76-95426fff93fa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.180151] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Created folder: Project (949b8296172b4f4aab8bd28c8f4a03d7) in parent group-v421118. [ 1745.180151] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Creating folder: Instances. Parent ref: group-v421157. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1745.182467] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eb20842d-a1c9-4f7c-8d3c-df6db64c2bcd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.185132] env[62684]: DEBUG oslo_concurrency.lockutils [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1745.193693] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Created folder: Instances in parent group-v421157. [ 1745.194020] env[62684]: DEBUG oslo.service.loopingcall [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1745.194123] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1745.194847] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-492da191-2848-46a7-9a3d-34129e6baa8c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.227791] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1745.227791] env[62684]: value = "task-2052382" [ 1745.227791] env[62684]: _type = "Task" [ 1745.227791] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1745.228613] env[62684]: DEBUG nova.network.neutron [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Successfully created port: 200d8b57-0aad-430f-8a16-63f7ce3d1668 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1745.251273] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052382, 'name': CreateVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.295133] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ac0890-0375-475b-8fd3-91cf3d441379 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.306344] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7b49c2-9998-498a-aded-c8b72470203d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.355626] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97eccaa6-f24d-4aa5-acab-36943812be85 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.365448] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e4710a7-ab74-4dfe-b0e6-3a9ead5cbec6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.382397] env[62684]: DEBUG nova.compute.provider_tree [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1745.436597] env[62684]: DEBUG oslo_vmware.api [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052378, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.536740] env[62684]: DEBUG oslo_vmware.api [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052379, 'name': Destroy_Task, 'duration_secs': 0.693695} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.536740] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Destroyed the VM [ 1745.537456] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Deleting Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1745.537600] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-dc93b750-bd6c-4205-8212-97b1a03e229e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.546222] env[62684]: DEBUG oslo_vmware.api [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Waiting for the task: (returnval){ [ 1745.546222] env[62684]: value = "task-2052383" [ 1745.546222] env[62684]: _type = "Task" [ 1745.546222] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1745.559156] env[62684]: DEBUG oslo_vmware.api [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052383, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.751639] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052382, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.758582] env[62684]: DEBUG nova.compute.manager [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1745.769199] env[62684]: DEBUG oslo_concurrency.lockutils [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Acquiring lock "ca22ca59-1b60-46f0-ae83-03ed4002fa0d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1745.769753] env[62684]: DEBUG oslo_concurrency.lockutils [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Lock "ca22ca59-1b60-46f0-ae83-03ed4002fa0d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1745.788865] env[62684]: DEBUG nova.virt.hardware [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1745.788982] env[62684]: DEBUG nova.virt.hardware [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1745.789271] env[62684]: DEBUG nova.virt.hardware [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1745.789271] env[62684]: DEBUG nova.virt.hardware [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1745.789622] env[62684]: DEBUG nova.virt.hardware [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1745.789687] env[62684]: DEBUG nova.virt.hardware [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 
tempest-AttachInterfacesV270Test-522583453-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1745.789951] env[62684]: DEBUG nova.virt.hardware [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1745.790215] env[62684]: DEBUG nova.virt.hardware [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1745.790341] env[62684]: DEBUG nova.virt.hardware [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1745.790466] env[62684]: DEBUG nova.virt.hardware [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1745.790645] env[62684]: DEBUG nova.virt.hardware [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1745.792016] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f7ace2-e8c9-4759-b9fc-e17ad514554b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.801202] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e75c2888-2126-437f-a6be-611283fbe106 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.886049] env[62684]: DEBUG nova.scheduler.client.report [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1745.933585] env[62684]: DEBUG oslo_vmware.api [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052378, 'name': 
CloneVM_Task} progress is 95%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.066805] env[62684]: DEBUG oslo_vmware.api [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052383, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.231652] env[62684]: DEBUG nova.network.neutron [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Successfully updated port: 8f6b3e69-1998-4808-9c1a-1224c8ab5363 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1746.246097] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052382, 'name': CreateVM_Task, 'duration_secs': 0.526609} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1746.246229] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1746.246925] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1746.247117] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1746.247449] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1746.248284] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14e708f5-916a-4a95-b141-4a140e9fb621 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.253360] env[62684]: DEBUG oslo_vmware.api [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Waiting for the task: (returnval){ [ 1746.253360] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52599138-77da-f42a-baea-b73894fed9dc" [ 1746.253360] env[62684]: _type = "Task" [ 1746.253360] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1746.264147] env[62684]: DEBUG oslo_vmware.api [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52599138-77da-f42a-baea-b73894fed9dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.387860] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Acquiring lock "a9dfeb4d-a92e-41cf-9d2f-43086cc9e868" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1746.388036] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Lock "a9dfeb4d-a92e-41cf-9d2f-43086cc9e868" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1746.396050] env[62684]: DEBUG oslo_concurrency.lockutils [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.679s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1746.396616] env[62684]: DEBUG nova.compute.manager [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1746.401463] env[62684]: DEBUG oslo_concurrency.lockutils [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.919s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1746.401849] env[62684]: DEBUG nova.objects.instance [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Lazy-loading 'resources' on Instance uuid e4528a29-163d-4f5e-9497-6e6b90b290ba {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1746.436563] env[62684]: DEBUG oslo_vmware.api [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052378, 'name': CloneVM_Task, 'duration_secs': 1.849692} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1746.436563] env[62684]: INFO nova.virt.vmwareapi.vmops [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Created linked-clone VM from snapshot [ 1746.437844] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6681587c-d302-4d7e-a6c5-578be9b259fb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.447277] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Uploading image cb72481b-e414-4c6f-a209-0ec1dbe35df0 {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1746.485948] env[62684]: DEBUG oslo_vmware.rw_handles [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1746.485948] env[62684]: value = "vm-421156" [ 1746.485948] env[62684]: _type = "VirtualMachine" [ 1746.485948] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1746.487032] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-186522c7-14e3-4b68-9361-16e58bdd8226 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.497025] env[62684]: DEBUG oslo_vmware.rw_handles [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lease: (returnval){ [ 1746.497025] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5243518b-4fe1-ee55-7d1a-d3a79e585abd" [ 1746.497025] env[62684]: _type = "HttpNfcLease" [ 1746.497025] env[62684]: } obtained for exporting VM: (result){ [ 1746.497025] env[62684]: value = "vm-421156" [ 1746.497025] env[62684]: _type = "VirtualMachine" [ 1746.497025] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1746.497025] env[62684]: DEBUG oslo_vmware.api [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the lease: (returnval){ [ 1746.497025] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5243518b-4fe1-ee55-7d1a-d3a79e585abd" [ 1746.497025] env[62684]: _type = "HttpNfcLease" [ 1746.497025] env[62684]: } to be ready. {{(pid=62684) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1746.505565] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1746.505565] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5243518b-4fe1-ee55-7d1a-d3a79e585abd" [ 1746.505565] env[62684]: _type = "HttpNfcLease" [ 1746.505565] env[62684]: } is initializing. 
{{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1746.560046] env[62684]: DEBUG oslo_vmware.api [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052383, 'name': RemoveSnapshot_Task, 'duration_secs': 0.996059} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1746.560046] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Deleted Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1746.560439] env[62684]: INFO nova.compute.manager [None req-cd6b8d11-569e-400d-9d63-f373504e7540 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Took 17.75 seconds to snapshot the instance on the hypervisor. [ 1746.739206] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Acquiring lock "refresh_cache-6b1f0e69-3915-40dc-b4ec-93ab174f12b6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1746.739206] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Acquired lock "refresh_cache-6b1f0e69-3915-40dc-b4ec-93ab174f12b6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1746.739518] env[62684]: DEBUG nova.network.neutron [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1746.766861] env[62684]: DEBUG oslo_vmware.api [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52599138-77da-f42a-baea-b73894fed9dc, 'name': SearchDatastore_Task, 'duration_secs': 0.012914} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1746.767222] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1746.767495] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1746.767974] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1746.768137] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1746.768345] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1746.768642] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83ce75ee-045f-4a01-97d6-16f5015245fe {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.777695] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1746.777695] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1746.778398] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f45be08e-8a2d-477d-afb6-25864201119f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.784630] env[62684]: DEBUG oslo_vmware.api [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Waiting for the task: (returnval){ [ 1746.784630] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526f5aec-2356-9742-cc1e-67d1239feb6b" [ 1746.784630] env[62684]: _type = "Task" [ 1746.784630] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1746.793225] env[62684]: DEBUG oslo_vmware.api [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526f5aec-2356-9742-cc1e-67d1239feb6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.906069] env[62684]: DEBUG nova.compute.utils [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1746.912326] env[62684]: DEBUG nova.compute.manager [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1746.912513] env[62684]: DEBUG nova.network.neutron [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1746.928464] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1746.928731] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.001511] env[62684]: DEBUG nova.policy [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '711b8497177c40c697d373ab866e3cb7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72ac36eda47d4c51a4b421c764d0404d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1747.016224] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1747.016224] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5243518b-4fe1-ee55-7d1a-d3a79e585abd" [ 1747.016224] env[62684]: _type = "HttpNfcLease" [ 1747.016224] env[62684]: } is ready. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1747.016224] env[62684]: DEBUG oslo_vmware.rw_handles [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1747.016224] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5243518b-4fe1-ee55-7d1a-d3a79e585abd" [ 1747.016224] env[62684]: _type = "HttpNfcLease" [ 1747.016224] env[62684]: }. 
{{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1747.016224] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d38ab34-3615-497c-9033-1ed239c63b87 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.026159] env[62684]: DEBUG oslo_vmware.rw_handles [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525a881a-c1da-11de-7302-0745902fcf95/disk-0.vmdk from lease info. {{(pid=62684) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1747.026449] env[62684]: DEBUG oslo_vmware.rw_handles [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525a881a-c1da-11de-7302-0745902fcf95/disk-0.vmdk for reading. {{(pid=62684) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1747.102262] env[62684]: DEBUG nova.compute.manager [req-23571918-f036-430a-8224-2d6495d666a9 req-46fdfecb-8677-4926-beb4-15d582904dbf service nova] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Received event network-changed-b9e25052-ec41-470d-b549-89e542cb4366 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1747.102262] env[62684]: DEBUG nova.compute.manager [req-23571918-f036-430a-8224-2d6495d666a9 req-46fdfecb-8677-4926-beb4-15d582904dbf service nova] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Refreshing instance network info cache due to event network-changed-b9e25052-ec41-470d-b549-89e542cb4366. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1747.102505] env[62684]: DEBUG oslo_concurrency.lockutils [req-23571918-f036-430a-8224-2d6495d666a9 req-46fdfecb-8677-4926-beb4-15d582904dbf service nova] Acquiring lock "refresh_cache-effc673a-103f-413b-88ac-6907ad1ee852" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1747.102914] env[62684]: DEBUG oslo_concurrency.lockutils [req-23571918-f036-430a-8224-2d6495d666a9 req-46fdfecb-8677-4926-beb4-15d582904dbf service nova] Acquired lock "refresh_cache-effc673a-103f-413b-88ac-6907ad1ee852" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1747.102914] env[62684]: DEBUG nova.network.neutron [req-23571918-f036-430a-8224-2d6495d666a9 req-46fdfecb-8677-4926-beb4-15d582904dbf service nova] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Refreshing network info cache for port b9e25052-ec41-470d-b549-89e542cb4366 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1747.179103] env[62684]: DEBUG nova.compute.manager [req-be4f1731-7a04-4775-9cc2-b374dc30326f req-c3594492-37a6-446f-a70e-f51050cb1fd7 service nova] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Received event network-changed-f8d1bd12-b449-41ef-bd95-755f619b639a {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1747.179346] env[62684]: DEBUG nova.compute.manager [req-be4f1731-7a04-4775-9cc2-b374dc30326f req-c3594492-37a6-446f-a70e-f51050cb1fd7 service nova] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Refreshing instance network info cache due to event network-changed-f8d1bd12-b449-41ef-bd95-755f619b639a. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1747.179683] env[62684]: DEBUG oslo_concurrency.lockutils [req-be4f1731-7a04-4775-9cc2-b374dc30326f req-c3594492-37a6-446f-a70e-f51050cb1fd7 service nova] Acquiring lock "refresh_cache-d532b5fa-90a3-4f25-8684-4eabaa432c86" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1747.179775] env[62684]: DEBUG oslo_concurrency.lockutils [req-be4f1731-7a04-4775-9cc2-b374dc30326f req-c3594492-37a6-446f-a70e-f51050cb1fd7 service nova] Acquired lock "refresh_cache-d532b5fa-90a3-4f25-8684-4eabaa432c86" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1747.179950] env[62684]: DEBUG nova.network.neutron [req-be4f1731-7a04-4775-9cc2-b374dc30326f req-c3594492-37a6-446f-a70e-f51050cb1fd7 service nova] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Refreshing network info cache for port f8d1bd12-b449-41ef-bd95-755f619b639a {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1747.221328] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-301ee633-4b39-4a7c-86ec-03c35c56b1eb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.299595] env[62684]: DEBUG oslo_vmware.api [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526f5aec-2356-9742-cc1e-67d1239feb6b, 'name': SearchDatastore_Task, 'duration_secs': 0.023452} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1747.301972] env[62684]: DEBUG nova.network.neutron [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1747.304137] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3f9e769-a24f-4fc2-9a58-1c441d6ae32a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.312783] env[62684]: DEBUG oslo_vmware.api [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Waiting for the task: (returnval){ [ 1747.312783] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5295dd2c-8ee8-5ad2-a338-34c439ed00aa" [ 1747.312783] env[62684]: _type = "Task" [ 1747.312783] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.327883] env[62684]: DEBUG oslo_vmware.api [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5295dd2c-8ee8-5ad2-a338-34c439ed00aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.415694] env[62684]: DEBUG nova.compute.manager [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1747.458883] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76ca03c-db8f-481c-b968-6fcc921ad2f7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.475031] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6840479-77af-41c3-a7f7-4bd36848204f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.518172] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c969603-5189-487a-973a-1ac68f7e92ab {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.532198] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3400832-b886-4a52-b9a1-ad9f2ab8fbd4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.547661] env[62684]: DEBUG nova.compute.provider_tree [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1747.681079] env[62684]: DEBUG nova.network.neutron [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Updating instance_info_cache with network_info: [{"id": "8f6b3e69-1998-4808-9c1a-1224c8ab5363", "address": "fa:16:3e:f0:98:53", "network": {"id": "c4f4fd23-e4d0-4e7a-861b-71eeb3008ae5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1003879390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e169ffb4120143dca6d67108986e62f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f6b3e69-19", "ovs_interfaceid": "8f6b3e69-1998-4808-9c1a-1224c8ab5363", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1747.727653] env[62684]: DEBUG nova.network.neutron [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Successfully created port: 30471946-98e4-4413-acb5-8a8190d1dd82 {{(pid=62684) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 1747.824648] env[62684]: DEBUG oslo_vmware.api [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5295dd2c-8ee8-5ad2-a338-34c439ed00aa, 'name': SearchDatastore_Task, 'duration_secs': 0.031957} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1747.824982] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1747.825283] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] d532b5fa-90a3-4f25-8684-4eabaa432c86/d532b5fa-90a3-4f25-8684-4eabaa432c86.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1747.825560] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-69651883-4c7d-4dc0-b10e-4565fd4feccc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.841803] env[62684]: DEBUG oslo_vmware.api [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Waiting for the task: (returnval){ [ 1747.841803] env[62684]: value = "task-2052385" [ 1747.841803] env[62684]: _type = "Task" [ 1747.841803] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.852183] env[62684]: DEBUG oslo_vmware.api [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Task: {'id': task-2052385, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.974291] env[62684]: DEBUG nova.network.neutron [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Successfully updated port: 200d8b57-0aad-430f-8a16-63f7ce3d1668 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1748.054145] env[62684]: DEBUG nova.scheduler.client.report [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1748.132541] env[62684]: DEBUG nova.network.neutron [req-be4f1731-7a04-4775-9cc2-b374dc30326f req-c3594492-37a6-446f-a70e-f51050cb1fd7 service nova] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Updated VIF entry in instance network info cache for port f8d1bd12-b449-41ef-bd95-755f619b639a. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1748.132997] env[62684]: DEBUG nova.network.neutron [req-be4f1731-7a04-4775-9cc2-b374dc30326f req-c3594492-37a6-446f-a70e-f51050cb1fd7 service nova] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Updating instance_info_cache with network_info: [{"id": "f8d1bd12-b449-41ef-bd95-755f619b639a", "address": "fa:16:3e:56:d6:c1", "network": {"id": "27f7ef6a-6ec1-4b90-962b-11f871569e37", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-434465261-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "949b8296172b4f4aab8bd28c8f4a03d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c", "external-id": "nsx-vlan-transportzone-977", "segmentation_id": 977, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8d1bd12-b4", "ovs_interfaceid": "f8d1bd12-b449-41ef-bd95-755f619b639a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1748.195171] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Releasing lock "refresh_cache-6b1f0e69-3915-40dc-b4ec-93ab174f12b6" {{(pid=62684) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1748.196350] env[62684]: DEBUG nova.compute.manager [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Instance network_info: |[{"id": "8f6b3e69-1998-4808-9c1a-1224c8ab5363", "address": "fa:16:3e:f0:98:53", "network": {"id": "c4f4fd23-e4d0-4e7a-861b-71eeb3008ae5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1003879390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e169ffb4120143dca6d67108986e62f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f6b3e69-19", "ovs_interfaceid": "8f6b3e69-1998-4808-9c1a-1224c8ab5363", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1748.197057] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:98:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '816c6e38-e200-4544-8c5b-9fc3e16c5761', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8f6b3e69-1998-4808-9c1a-1224c8ab5363', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1748.207630] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Creating folder: Project (e169ffb4120143dca6d67108986e62f6). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1748.208337] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c3df1860-4ced-4d8e-8c80-437c2646c5f0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.223980] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Created folder: Project (e169ffb4120143dca6d67108986e62f6) in parent group-v421118. [ 1748.224564] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Creating folder: Instances. Parent ref: group-v421160. 
{{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1748.229044] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d7f09dc7-6af1-4d6a-a905-90f220d7c1b5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.239333] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Created folder: Instances in parent group-v421160. [ 1748.239722] env[62684]: DEBUG oslo.service.loopingcall [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1748.240058] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1748.242860] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-41fb88e4-f7a2-475b-9bf4-28cc68699f59 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.274402] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1748.274402] env[62684]: value = "task-2052388" [ 1748.274402] env[62684]: _type = "Task" [ 1748.274402] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1748.290831] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052388, 'name': CreateVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.331041] env[62684]: DEBUG oslo_concurrency.lockutils [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Acquiring lock "8c046991-b294-4f33-9fce-a241984d66d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1748.331410] env[62684]: DEBUG oslo_concurrency.lockutils [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Lock "8c046991-b294-4f33-9fce-a241984d66d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1748.359353] env[62684]: DEBUG oslo_vmware.api [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Task: {'id': task-2052385, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.436127] env[62684]: DEBUG nova.compute.manager [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1748.463905] env[62684]: DEBUG nova.virt.hardware [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:45:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='914551720',id=23,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-31962424',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1748.463905] env[62684]: DEBUG nova.virt.hardware [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1748.463905] env[62684]: DEBUG nova.virt.hardware [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1748.464149] env[62684]: DEBUG nova.virt.hardware [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1748.464149] env[62684]: DEBUG nova.virt.hardware [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1748.464149] env[62684]: DEBUG nova.virt.hardware [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1748.464248] env[62684]: DEBUG nova.virt.hardware [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 
tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1748.464437] env[62684]: DEBUG nova.virt.hardware [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1748.464780] env[62684]: DEBUG nova.virt.hardware [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1748.465019] env[62684]: DEBUG nova.virt.hardware [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1748.465138] env[62684]: DEBUG nova.virt.hardware [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1748.466573] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c62d411c-af42-4aa8-82fd-878f1cbf6977 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.477563] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b2f2a8-5668-4fc8-afcc-ba8e6e1cb6aa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.482272] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Acquiring lock "refresh_cache-43d28811-26e4-4016-9f82-98349d4a05b7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1748.482272] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Acquired lock "refresh_cache-43d28811-26e4-4016-9f82-98349d4a05b7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1748.483391] env[62684]: DEBUG nova.network.neutron [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1748.561542] env[62684]: DEBUG oslo_concurrency.lockutils [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 
tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.160s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1748.564426] env[62684]: DEBUG oslo_concurrency.lockutils [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.058s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1748.568612] env[62684]: INFO nova.compute.claims [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1748.605950] env[62684]: INFO nova.scheduler.client.report [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Deleted allocations for instance e4528a29-163d-4f5e-9497-6e6b90b290ba [ 1748.635891] env[62684]: DEBUG oslo_concurrency.lockutils [req-be4f1731-7a04-4775-9cc2-b374dc30326f req-c3594492-37a6-446f-a70e-f51050cb1fd7 service nova] Releasing lock "refresh_cache-d532b5fa-90a3-4f25-8684-4eabaa432c86" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1748.774970] env[62684]: DEBUG nova.network.neutron [req-23571918-f036-430a-8224-2d6495d666a9 req-46fdfecb-8677-4926-beb4-15d582904dbf service nova] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Updated VIF entry in instance network info cache for port b9e25052-ec41-470d-b549-89e542cb4366. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1748.775446] env[62684]: DEBUG nova.network.neutron [req-23571918-f036-430a-8224-2d6495d666a9 req-46fdfecb-8677-4926-beb4-15d582904dbf service nova] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Updating instance_info_cache with network_info: [{"id": "b9e25052-ec41-470d-b549-89e542cb4366", "address": "fa:16:3e:55:a2:89", "network": {"id": "532579c0-d485-4585-bc63-1bbd3af0367a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1836758902-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e2e0ad7001b4b59805c1d6a3a0caf35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9e25052-ec", "ovs_interfaceid": "b9e25052-ec41-470d-b549-89e542cb4366", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1748.787406] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052388, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.856636] env[62684]: DEBUG oslo_vmware.api [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Task: {'id': task-2052385, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.622362} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1748.857198] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] d532b5fa-90a3-4f25-8684-4eabaa432c86/d532b5fa-90a3-4f25-8684-4eabaa432c86.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1748.857198] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1748.857530] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f0fad5a7-da49-4c19-b5bc-ce6242409f61 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.864383] env[62684]: DEBUG oslo_vmware.api [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Waiting for the task: (returnval){ [ 1748.864383] env[62684]: value = "task-2052389" [ 1748.864383] env[62684]: _type = "Task" [ 1748.864383] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1748.873187] env[62684]: DEBUG oslo_vmware.api [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Task: {'id': task-2052389, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.070385] env[62684]: DEBUG nova.network.neutron [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1749.116766] env[62684]: DEBUG oslo_concurrency.lockutils [None req-52ca3b22-a582-451c-8b71-2d9c5c424457 tempest-ServerDiagnosticsNegativeTest-433673317 tempest-ServerDiagnosticsNegativeTest-433673317-project-member] Lock "e4528a29-163d-4f5e-9497-6e6b90b290ba" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.279s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1749.282598] env[62684]: DEBUG oslo_concurrency.lockutils [req-23571918-f036-430a-8224-2d6495d666a9 req-46fdfecb-8677-4926-beb4-15d582904dbf service nova] Releasing lock "refresh_cache-effc673a-103f-413b-88ac-6907ad1ee852" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1749.282883] env[62684]: DEBUG nova.compute.manager [req-23571918-f036-430a-8224-2d6495d666a9 req-46fdfecb-8677-4926-beb4-15d582904dbf service nova] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Received event network-vif-deleted-45eb32aa-2917-4675-a758-bf202fb0fc08 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1749.287325] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052388, 'name': CreateVM_Task, 'duration_secs': 0.566035} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1749.287476] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1749.288140] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1749.288311] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1749.288638] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1749.289234] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ef2a6d1-808e-4a28-8e8b-ef23f9d41a7e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.294715] env[62684]: DEBUG oslo_vmware.api [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Waiting for the task: (returnval){ [ 1749.294715] env[62684]: value = 
"session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52970020-85de-48c9-d56a-af5e235458e7" [ 1749.294715] env[62684]: _type = "Task" [ 1749.294715] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1749.306984] env[62684]: DEBUG oslo_vmware.api [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52970020-85de-48c9-d56a-af5e235458e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.376996] env[62684]: DEBUG oslo_vmware.api [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Task: {'id': task-2052389, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070343} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1749.377326] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1749.378108] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4c89b06-9a87-4d62-a5a2-f79df12824e6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.400571] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Reconfiguring VM instance instance-0000000c to attach disk [datastore2] d532b5fa-90a3-4f25-8684-4eabaa432c86/d532b5fa-90a3-4f25-8684-4eabaa432c86.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1749.401283] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8268185-ae7a-47b8-a4c2-f6169f5c896a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.425578] env[62684]: DEBUG oslo_vmware.api [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Waiting for the task: (returnval){ [ 1749.425578] env[62684]: value = "task-2052390" [ 1749.425578] env[62684]: _type = "Task" [ 1749.425578] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1749.434174] env[62684]: DEBUG oslo_vmware.api [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Task: {'id': task-2052390, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.570625] env[62684]: DEBUG nova.network.neutron [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Updating instance_info_cache with network_info: [{"id": "200d8b57-0aad-430f-8a16-63f7ce3d1668", "address": "fa:16:3e:30:33:9d", "network": {"id": "8aa987c0-d5cd-4b21-a9ca-c01cb83c668a", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1441412280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5fca9cf7854c218807cb17d86aff8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd8c6be9-575e-4605-b779-98606281a3bf", "external-id": "nsx-vlan-transportzone-273", "segmentation_id": 273, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap200d8b57-0a", "ovs_interfaceid": "200d8b57-0aad-430f-8a16-63f7ce3d1668", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1749.630022] env[62684]: DEBUG nova.network.neutron [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Successfully updated port: 30471946-98e4-4413-acb5-8a8190d1dd82 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1749.808445] env[62684]: DEBUG oslo_vmware.api [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52970020-85de-48c9-d56a-af5e235458e7, 'name': SearchDatastore_Task, 'duration_secs': 0.013473} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1749.808445] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1749.808445] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1749.808445] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1749.809051] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1749.809051] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1749.811517] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c1f040ec-b7cd-4041-8056-efd214be1f3b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.821280] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1749.821460] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1749.822214] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ca2893e-f6a4-4484-bc42-cf78fe893e8f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.829943] env[62684]: DEBUG oslo_vmware.api [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Waiting for the task: (returnval){ [ 1749.829943] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524d16e7-be2d-2605-c38a-2c3074ffafcb" [ 1749.829943] env[62684]: _type = "Task" [ 1749.829943] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1749.841340] env[62684]: DEBUG oslo_vmware.api [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524d16e7-be2d-2605-c38a-2c3074ffafcb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.938212] env[62684]: DEBUG oslo_vmware.api [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Task: {'id': task-2052390, 'name': ReconfigVM_Task, 'duration_secs': 0.458471} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1749.938530] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Reconfigured VM instance instance-0000000c to attach disk [datastore2] d532b5fa-90a3-4f25-8684-4eabaa432c86/d532b5fa-90a3-4f25-8684-4eabaa432c86.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1749.941905] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2ebd0f94-6ca7-4ad5-9fd4-1f79afdfb568 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.948427] env[62684]: DEBUG oslo_vmware.api [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Waiting for the task: (returnval){ [ 1749.948427] env[62684]: value = "task-2052391" [ 1749.948427] env[62684]: _type = "Task" [ 1749.948427] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1749.959697] env[62684]: DEBUG oslo_vmware.api [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Task: {'id': task-2052391, 'name': Rename_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.049092] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7dcc079-482d-4d45-af12-eb1ee6db6b81 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.057335] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9514ab43-91a1-4258-b072-d5b7d586476e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.092867] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Releasing lock "refresh_cache-43d28811-26e4-4016-9f82-98349d4a05b7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1750.093189] env[62684]: DEBUG nova.compute.manager [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Instance network_info: |[{"id": "200d8b57-0aad-430f-8a16-63f7ce3d1668", "address": "fa:16:3e:30:33:9d", "network": {"id": "8aa987c0-d5cd-4b21-a9ca-c01cb83c668a", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1441412280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5fca9cf7854c218807cb17d86aff8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd8c6be9-575e-4605-b779-98606281a3bf", "external-id": "nsx-vlan-transportzone-273", "segmentation_id": 273, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap200d8b57-0a", "ovs_interfaceid": "200d8b57-0aad-430f-8a16-63f7ce3d1668", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1750.094080] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:30:33:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bd8c6be9-575e-4605-b779-98606281a3bf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '200d8b57-0aad-430f-8a16-63f7ce3d1668', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1750.101956] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Creating folder: Project (ca5fca9cf7854c218807cb17d86aff8e). Parent ref: group-v421118. 
{{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1750.102796] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbe2e59d-a0dc-44df-96f5-88b0b1150c07 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.105846] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-374635c0-b2e1-46f9-9dd2-f944745cc4e4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.113178] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c1ecb9-0128-4bfc-b2d5-339118f92555 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.119055] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Created folder: Project (ca5fca9cf7854c218807cb17d86aff8e) in parent group-v421118. [ 1750.119055] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Creating folder: Instances. Parent ref: group-v421163. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1750.119766] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-22f099c9-df71-4427-850d-77664772f1fb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.131703] env[62684]: DEBUG nova.compute.provider_tree [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1750.135707] env[62684]: DEBUG oslo_concurrency.lockutils [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Acquiring lock "refresh_cache-73f27fc0-ebae-41c7-b292-14396f79a5a2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1750.135912] env[62684]: DEBUG oslo_concurrency.lockutils [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Acquired lock "refresh_cache-73f27fc0-ebae-41c7-b292-14396f79a5a2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1750.136042] env[62684]: DEBUG nova.network.neutron [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1750.138509] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 
tempest-AttachInterfacesV270Test-522583453-project-member] Created folder: Instances in parent group-v421163. [ 1750.142729] env[62684]: DEBUG oslo.service.loopingcall [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1750.142729] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1750.142729] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d6a6e711-d780-419d-974e-f1ba7a9d5496 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.161022] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1750.161022] env[62684]: value = "task-2052394" [ 1750.161022] env[62684]: _type = "Task" [ 1750.161022] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.170736] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052394, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.345468] env[62684]: DEBUG oslo_vmware.api [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524d16e7-be2d-2605-c38a-2c3074ffafcb, 'name': SearchDatastore_Task, 'duration_secs': 0.013696} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1750.346827] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c2cb87b-5cda-440b-a9f3-afb4944b8bcf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.355068] env[62684]: DEBUG oslo_vmware.api [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Waiting for the task: (returnval){ [ 1750.355068] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520cf352-9531-9a54-0fdb-3d4337abb1c0" [ 1750.355068] env[62684]: _type = "Task" [ 1750.355068] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.363506] env[62684]: DEBUG oslo_vmware.api [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520cf352-9531-9a54-0fdb-3d4337abb1c0, 'name': SearchDatastore_Task} progress is 0%. 
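The "Waiting for the task ... to complete" / "progress is 0%" pairs above are oslo.vmware's task-polling loop: the driver invokes a SOAP *_Task method (here Folder.CreateVM_Task and HostDatastoreBrowser.SearchDatastore_Task), gets back a Task managed-object reference, and blocks in wait_for_task(), which re-reads the task info on an interval and emits the _poll_task progress lines. A minimal sketch of the pattern, assuming an established session; the folder, resource-pool and config-spec references below are placeholders, not values taken from this log:

    from oslo_vmware import api

    # Illustrative connection values; Nova reads the real ones from nova.conf [vmware].
    session = api.VMwareAPISession('vc.example.org', 'admin', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Placeholders: the real folder/resource-pool morefs and the
    # VirtualMachineConfigSpec are built elsewhere (see build_virtual_machine above).
    folder_ref = respool_ref = config_spec = None

    task_ref = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                  config=config_spec, pool=respool_ref)
    task_info = session.wait_for_task(task_ref)  # polls until success, raises on error
    vm_ref = task_info.result                    # moref of the newly created VM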
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.459170] env[62684]: DEBUG nova.compute.manager [req-fd696198-c6e6-429a-ba3b-f76ca03d4ccb req-15708223-2dc0-4647-b75f-d3716da59ee2 service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Received event network-vif-plugged-8f6b3e69-1998-4808-9c1a-1224c8ab5363 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1750.459170] env[62684]: DEBUG oslo_concurrency.lockutils [req-fd696198-c6e6-429a-ba3b-f76ca03d4ccb req-15708223-2dc0-4647-b75f-d3716da59ee2 service nova] Acquiring lock "6b1f0e69-3915-40dc-b4ec-93ab174f12b6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1750.459170] env[62684]: DEBUG oslo_concurrency.lockutils [req-fd696198-c6e6-429a-ba3b-f76ca03d4ccb req-15708223-2dc0-4647-b75f-d3716da59ee2 service nova] Lock "6b1f0e69-3915-40dc-b4ec-93ab174f12b6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1750.459170] env[62684]: DEBUG oslo_concurrency.lockutils [req-fd696198-c6e6-429a-ba3b-f76ca03d4ccb req-15708223-2dc0-4647-b75f-d3716da59ee2 service nova] Lock "6b1f0e69-3915-40dc-b4ec-93ab174f12b6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1750.459170] env[62684]: DEBUG nova.compute.manager [req-fd696198-c6e6-429a-ba3b-f76ca03d4ccb req-15708223-2dc0-4647-b75f-d3716da59ee2 service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] No waiting events found dispatching network-vif-plugged-8f6b3e69-1998-4808-9c1a-1224c8ab5363 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1750.459591] env[62684]: WARNING nova.compute.manager [req-fd696198-c6e6-429a-ba3b-f76ca03d4ccb req-15708223-2dc0-4647-b75f-d3716da59ee2 service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Received unexpected event network-vif-plugged-8f6b3e69-1998-4808-9c1a-1224c8ab5363 for instance with vm_state building and task_state spawning. [ 1750.459591] env[62684]: DEBUG nova.compute.manager [req-fd696198-c6e6-429a-ba3b-f76ca03d4ccb req-15708223-2dc0-4647-b75f-d3716da59ee2 service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Received event network-changed-8f6b3e69-1998-4808-9c1a-1224c8ab5363 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1750.459836] env[62684]: DEBUG nova.compute.manager [req-fd696198-c6e6-429a-ba3b-f76ca03d4ccb req-15708223-2dc0-4647-b75f-d3716da59ee2 service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Refreshing instance network info cache due to event network-changed-8f6b3e69-1998-4808-9c1a-1224c8ab5363. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1750.460861] env[62684]: DEBUG oslo_concurrency.lockutils [req-fd696198-c6e6-429a-ba3b-f76ca03d4ccb req-15708223-2dc0-4647-b75f-d3716da59ee2 service nova] Acquiring lock "refresh_cache-6b1f0e69-3915-40dc-b4ec-93ab174f12b6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1750.461152] env[62684]: DEBUG oslo_concurrency.lockutils [req-fd696198-c6e6-429a-ba3b-f76ca03d4ccb req-15708223-2dc0-4647-b75f-d3716da59ee2 service nova] Acquired lock "refresh_cache-6b1f0e69-3915-40dc-b4ec-93ab174f12b6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1750.461486] env[62684]: DEBUG nova.network.neutron [req-fd696198-c6e6-429a-ba3b-f76ca03d4ccb req-15708223-2dc0-4647-b75f-d3716da59ee2 service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Refreshing network info cache for port 8f6b3e69-1998-4808-9c1a-1224c8ab5363 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1750.468523] env[62684]: DEBUG oslo_vmware.api [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Task: {'id': task-2052391, 'name': Rename_Task, 'duration_secs': 0.239808} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1750.469305] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1750.469760] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c3e83d78-2f39-4e63-8b6f-92f0f517f4aa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.478624] env[62684]: DEBUG oslo_vmware.api [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Waiting for the task: (returnval){ [ 1750.478624] env[62684]: value = "task-2052395" [ 1750.478624] env[62684]: _type = "Task" [ 1750.478624] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.488922] env[62684]: DEBUG oslo_vmware.api [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Task: {'id': task-2052395, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.545128] env[62684]: DEBUG nova.compute.manager [req-42f2b8b0-d391-459a-a366-33e5e84f2b11 req-a17c7fad-b3ef-4859-8a6e-bbff19c30843 service nova] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Received event network-vif-plugged-200d8b57-0aad-430f-8a16-63f7ce3d1668 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1750.545239] env[62684]: DEBUG oslo_concurrency.lockutils [req-42f2b8b0-d391-459a-a366-33e5e84f2b11 req-a17c7fad-b3ef-4859-8a6e-bbff19c30843 service nova] Acquiring lock "43d28811-26e4-4016-9f82-98349d4a05b7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1750.545543] env[62684]: DEBUG oslo_concurrency.lockutils [req-42f2b8b0-d391-459a-a366-33e5e84f2b11 req-a17c7fad-b3ef-4859-8a6e-bbff19c30843 service nova] Lock "43d28811-26e4-4016-9f82-98349d4a05b7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1750.545671] env[62684]: DEBUG oslo_concurrency.lockutils [req-42f2b8b0-d391-459a-a366-33e5e84f2b11 req-a17c7fad-b3ef-4859-8a6e-bbff19c30843 service nova] Lock "43d28811-26e4-4016-9f82-98349d4a05b7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1750.545769] env[62684]: DEBUG nova.compute.manager [req-42f2b8b0-d391-459a-a366-33e5e84f2b11 req-a17c7fad-b3ef-4859-8a6e-bbff19c30843 service nova] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] No waiting events found dispatching network-vif-plugged-200d8b57-0aad-430f-8a16-63f7ce3d1668 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1750.545933] env[62684]: WARNING nova.compute.manager [req-42f2b8b0-d391-459a-a366-33e5e84f2b11 req-a17c7fad-b3ef-4859-8a6e-bbff19c30843 service nova] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Received unexpected event network-vif-plugged-200d8b57-0aad-430f-8a16-63f7ce3d1668 for instance with vm_state building and task_state spawning. [ 1750.546459] env[62684]: DEBUG nova.compute.manager [req-42f2b8b0-d391-459a-a366-33e5e84f2b11 req-a17c7fad-b3ef-4859-8a6e-bbff19c30843 service nova] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Received event network-changed-200d8b57-0aad-430f-8a16-63f7ce3d1668 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1750.546672] env[62684]: DEBUG nova.compute.manager [req-42f2b8b0-d391-459a-a366-33e5e84f2b11 req-a17c7fad-b3ef-4859-8a6e-bbff19c30843 service nova] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Refreshing instance network info cache due to event network-changed-200d8b57-0aad-430f-8a16-63f7ce3d1668. 
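The network-vif-plugged / network-changed events above arrive from Neutron through Nova's os-server-external-events API; because the instance is still in vm_state building and nothing is waiting on the event yet, the compute manager logs it as unexpected and simply refreshes the network info cache. Roughly the request Neutron's Nova notifier sends (endpoint and token are placeholders; Neutron really authenticates with its configured [nova] credentials):

    import requests

    body = {"events": [{
        "name": "network-vif-plugged",
        "server_uuid": "6b1f0e69-3915-40dc-b4ec-93ab174f12b6",
        "tag": "8f6b3e69-1998-4808-9c1a-1224c8ab5363",   # the port ID
        "status": "completed",
    }]}
    requests.post("http://nova-api.example.org/v2.1/os-server-external-events",
                  json=body, headers={"X-Auth-Token": "<token>"})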
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1750.547204] env[62684]: DEBUG oslo_concurrency.lockutils [req-42f2b8b0-d391-459a-a366-33e5e84f2b11 req-a17c7fad-b3ef-4859-8a6e-bbff19c30843 service nova] Acquiring lock "refresh_cache-43d28811-26e4-4016-9f82-98349d4a05b7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1750.547204] env[62684]: DEBUG oslo_concurrency.lockutils [req-42f2b8b0-d391-459a-a366-33e5e84f2b11 req-a17c7fad-b3ef-4859-8a6e-bbff19c30843 service nova] Acquired lock "refresh_cache-43d28811-26e4-4016-9f82-98349d4a05b7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1750.547204] env[62684]: DEBUG nova.network.neutron [req-42f2b8b0-d391-459a-a366-33e5e84f2b11 req-a17c7fad-b3ef-4859-8a6e-bbff19c30843 service nova] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Refreshing network info cache for port 200d8b57-0aad-430f-8a16-63f7ce3d1668 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1750.635062] env[62684]: DEBUG nova.scheduler.client.report [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1750.672651] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052394, 'name': CreateVM_Task, 'duration_secs': 0.489428} completed successfully. 
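The inventory data above translates into Placement capacity as (total - reserved) * allocation_ratio per resource class, so this node can hand out 192 vCPUs, 196078 MB of RAM and 400 GB of disk. A quick check of that arithmetic:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0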
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1750.672852] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1750.673931] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1750.673931] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1750.674409] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1750.674578] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39f1bc7e-9a56-4fce-a331-2d63b6eb55e2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.680281] env[62684]: DEBUG oslo_vmware.api [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Waiting for the task: (returnval){ [ 1750.680281] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ae770c-bba3-9cbf-e819-b8fe0954f306" [ 1750.680281] env[62684]: _type = "Task" [ 1750.680281] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.690469] env[62684]: DEBUG oslo_vmware.api [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ae770c-bba3-9cbf-e819-b8fe0954f306, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.715043] env[62684]: DEBUG nova.network.neutron [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1750.870906] env[62684]: DEBUG oslo_vmware.api [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520cf352-9531-9a54-0fdb-3d4337abb1c0, 'name': SearchDatastore_Task, 'duration_secs': 0.013095} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1750.871433] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1750.874030] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 6b1f0e69-3915-40dc-b4ec-93ab174f12b6/6b1f0e69-3915-40dc-b4ec-93ab174f12b6.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1750.874030] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4678e5ae-5a87-450a-a88b-bd35ac4d859a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.880902] env[62684]: DEBUG oslo_vmware.api [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Waiting for the task: (returnval){ [ 1750.880902] env[62684]: value = "task-2052396" [ 1750.880902] env[62684]: _type = "Task" [ 1750.880902] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.891168] env[62684]: DEBUG oslo_vmware.api [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052396, 'name': CopyVirtualDisk_Task} progress is 0%. 
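The copy above stages the instance's root disk by cloning the cached image VMDK from devstack-image-cache_base into the instance directory on the same datastore, via the vCenter VirtualDiskManager. A hedged sketch of that call through an oslo.vmware session (the session and the datacenter moref dc_ref are assumed to exist; the paths mirror the log):

    src = ('[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/'
           '3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk')
    dst = ('[datastore2] 6b1f0e69-3915-40dc-b4ec-93ab174f12b6/'
           '6b1f0e69-3915-40dc-b4ec-93ab174f12b6.vmdk')

    disk_mgr = session.vim.service_content.virtualDiskManager
    copy_task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                                   sourceName=src, sourceDatacenter=dc_ref,
                                   destName=dst, destDatacenter=dc_ref)
    session.wait_for_task(copy_task)  # produces the CopyVirtualDisk_Task progress lines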
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.951169] env[62684]: DEBUG nova.network.neutron [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Updating instance_info_cache with network_info: [{"id": "30471946-98e4-4413-acb5-8a8190d1dd82", "address": "fa:16:3e:85:31:4c", "network": {"id": "7982dcb9-e661-4690-9931-bf412f4a564e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1278071472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72ac36eda47d4c51a4b421c764d0404d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9bb629cd-6d0f-4bed-965c-bd04a2f3ec49", "external-id": "nsx-vlan-transportzone-848", "segmentation_id": 848, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30471946-98", "ovs_interfaceid": "30471946-98e4-4413-acb5-8a8190d1dd82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1750.992251] env[62684]: DEBUG oslo_vmware.api [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Task: {'id': task-2052395, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.150073] env[62684]: DEBUG oslo_concurrency.lockutils [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.583s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1751.150073] env[62684]: DEBUG nova.compute.manager [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1751.152505] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.932s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1751.154492] env[62684]: INFO nova.compute.claims [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1751.200735] env[62684]: DEBUG oslo_vmware.api [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ae770c-bba3-9cbf-e819-b8fe0954f306, 'name': SearchDatastore_Task, 'duration_secs': 0.018186} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1751.200735] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1751.200735] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1751.200735] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1751.201325] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1751.201945] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1751.203479] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-774e9d8b-1419-40ff-b19e-6e56088629ee {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.223812] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1751.224184] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1751.225248] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c3c6390-419f-4b77-b0e6-3d73dfebfb4d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.235587] env[62684]: DEBUG oslo_vmware.api [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Waiting for the task: (returnval){ [ 1751.235587] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5259ecbb-c8f2-ff17-d49a-60f6fff266ca" [ 1751.235587] env[62684]: _type = "Task" [ 1751.235587] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.246387] env[62684]: DEBUG oslo_vmware.api [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5259ecbb-c8f2-ff17-d49a-60f6fff266ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.269240] env[62684]: DEBUG nova.network.neutron [req-fd696198-c6e6-429a-ba3b-f76ca03d4ccb req-15708223-2dc0-4647-b75f-d3716da59ee2 service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Updated VIF entry in instance network info cache for port 8f6b3e69-1998-4808-9c1a-1224c8ab5363. 
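The Acquiring / Acquired / Releasing lock lines scattered through this section come from oslo.concurrency's lockutils: Nova names a lock after the shared resource (a cached image VMDK, an instance's refresh_cache entry, compute_resources) so that only one greenthread touches it at a time. A minimal sketch of the pattern, with the lock name taken from the log and an empty body standing in for the real work:

    from oslo_concurrency import lockutils

    cache_vmdk = ('[datastore2] devstack-image-cache_base/'
                  '3931321c-cb4c-4b87-8d3a-50e05ea01db2/'
                  '3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk')

    # Emits DEBUG lines like the "Acquiring lock ... / Acquired lock ... /
    # Releasing lock ..." entries above while the block runs.
    with lockutils.lock(cache_vmdk):
        pass  # fetch or copy the cached image here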
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1751.269621] env[62684]: DEBUG nova.network.neutron [req-fd696198-c6e6-429a-ba3b-f76ca03d4ccb req-15708223-2dc0-4647-b75f-d3716da59ee2 service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Updating instance_info_cache with network_info: [{"id": "8f6b3e69-1998-4808-9c1a-1224c8ab5363", "address": "fa:16:3e:f0:98:53", "network": {"id": "c4f4fd23-e4d0-4e7a-861b-71eeb3008ae5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1003879390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e169ffb4120143dca6d67108986e62f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f6b3e69-19", "ovs_interfaceid": "8f6b3e69-1998-4808-9c1a-1224c8ab5363", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1751.397599] env[62684]: DEBUG oslo_vmware.api [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052396, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.458331] env[62684]: DEBUG oslo_concurrency.lockutils [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Releasing lock "refresh_cache-73f27fc0-ebae-41c7-b292-14396f79a5a2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1751.458331] env[62684]: DEBUG nova.compute.manager [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Instance network_info: |[{"id": "30471946-98e4-4413-acb5-8a8190d1dd82", "address": "fa:16:3e:85:31:4c", "network": {"id": "7982dcb9-e661-4690-9931-bf412f4a564e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1278071472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72ac36eda47d4c51a4b421c764d0404d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9bb629cd-6d0f-4bed-965c-bd04a2f3ec49", "external-id": "nsx-vlan-transportzone-848", "segmentation_id": 848, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30471946-98", "ovs_interfaceid": "30471946-98e4-4413-acb5-8a8190d1dd82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1751.458784] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:31:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9bb629cd-6d0f-4bed-965c-bd04a2f3ec49', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '30471946-98e4-4413-acb5-8a8190d1dd82', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1751.467953] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Creating folder: Project (72ac36eda47d4c51a4b421c764d0404d). Parent ref: group-v421118. 
{{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1751.471940] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fa004c0a-e6ce-4768-b9a3-ee47cfebf0b3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.486549] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Created folder: Project (72ac36eda47d4c51a4b421c764d0404d) in parent group-v421118. [ 1751.486771] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Creating folder: Instances. Parent ref: group-v421166. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1751.487049] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-87be1448-6424-4604-8e87-cd447cff6fa7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.494677] env[62684]: DEBUG oslo_vmware.api [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Task: {'id': task-2052395, 'name': PowerOnVM_Task} progress is 73%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.496468] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Created folder: Instances in parent group-v421166. [ 1751.496699] env[62684]: DEBUG oslo.service.loopingcall [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1751.496898] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1751.497124] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ef2bcda0-9dde-4d2c-84d6-f14fd142d149 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.522922] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1751.522922] env[62684]: value = "task-2052399" [ 1751.522922] env[62684]: _type = "Task" [ 1751.522922] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.534508] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052399, 'name': CreateVM_Task} progress is 0%. 
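The two Created folder entries above show vm_util building the per-tenant hierarchy: a "Project (<tenant id>)" folder under the Nova root folder (group-v421118), then an "Instances" folder beneath it, each via Folder.CreateFolder. A sketch of the underlying call, assuming an oslo.vmware session and a parent folder moref; the DuplicateName handling stands in for the "already exists" case and is simplified:

    from oslo_vmware import exceptions as vexc

    def create_child_folder(session, parent_ref, name):
        # Folder.CreateFolder returns the new folder's moref directly (it is
        # not a *_Task method); vCenter raises DuplicateName if it already exists.
        try:
            return session.invoke_api(session.vim, 'CreateFolder', parent_ref,
                                      name=name)
        except vexc.DuplicateName:
            return None  # caller looks up the existing folder instead

    # project_ref = create_child_folder(session, nova_root_folder_ref,
    #                                   'Project (72ac36eda47d4c51a4b421c764d0404d)')
    # create_child_folder(session, project_ref, 'Instances')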
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.662447] env[62684]: DEBUG nova.compute.utils [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1751.668546] env[62684]: DEBUG nova.compute.manager [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1751.668546] env[62684]: DEBUG nova.network.neutron [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1751.695686] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Acquiring lock "f44b2e88-af6d-4252-b562-9d5fa7745b56" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1751.695686] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Lock "f44b2e88-af6d-4252-b562-9d5fa7745b56" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1751.708974] env[62684]: DEBUG nova.network.neutron [req-42f2b8b0-d391-459a-a366-33e5e84f2b11 req-a17c7fad-b3ef-4859-8a6e-bbff19c30843 service nova] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Updated VIF entry in instance network info cache for port 200d8b57-0aad-430f-8a16-63f7ce3d1668. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1751.709454] env[62684]: DEBUG nova.network.neutron [req-42f2b8b0-d391-459a-a366-33e5e84f2b11 req-a17c7fad-b3ef-4859-8a6e-bbff19c30843 service nova] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Updating instance_info_cache with network_info: [{"id": "200d8b57-0aad-430f-8a16-63f7ce3d1668", "address": "fa:16:3e:30:33:9d", "network": {"id": "8aa987c0-d5cd-4b21-a9ca-c01cb83c668a", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1441412280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5fca9cf7854c218807cb17d86aff8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd8c6be9-575e-4605-b779-98606281a3bf", "external-id": "nsx-vlan-transportzone-273", "segmentation_id": 273, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap200d8b57-0a", "ovs_interfaceid": "200d8b57-0aad-430f-8a16-63f7ce3d1668", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1751.751161] env[62684]: DEBUG oslo_vmware.api [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5259ecbb-c8f2-ff17-d49a-60f6fff266ca, 'name': SearchDatastore_Task, 'duration_secs': 0.053628} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1751.752068] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c62d914-c79e-4094-ae22-46a70ce5898d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.755931] env[62684]: DEBUG nova.policy [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4f9ace4d78b94a3db9eb74236fca1e6a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aef5d7061c834332b9f9c5c75596bf08', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1751.762633] env[62684]: DEBUG oslo_vmware.api [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Waiting for the task: (returnval){ [ 1751.762633] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520c0f80-55aa-6ab9-b6bb-8169a096ce6f" [ 1751.762633] env[62684]: _type = "Task" [ 1751.762633] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.771257] env[62684]: DEBUG oslo_vmware.api [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520c0f80-55aa-6ab9-b6bb-8169a096ce6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.772739] env[62684]: DEBUG oslo_concurrency.lockutils [req-fd696198-c6e6-429a-ba3b-f76ca03d4ccb req-15708223-2dc0-4647-b75f-d3716da59ee2 service nova] Releasing lock "refresh_cache-6b1f0e69-3915-40dc-b4ec-93ab174f12b6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1751.901260] env[62684]: DEBUG oslo_vmware.api [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052396, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.561175} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1751.901692] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 6b1f0e69-3915-40dc-b4ec-93ab174f12b6/6b1f0e69-3915-40dc-b4ec-93ab174f12b6.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1751.901775] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1751.902043] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1536c774-786e-47aa-bb53-483b6e8beb24 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.909497] env[62684]: DEBUG oslo_vmware.api [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Waiting for the task: (returnval){ [ 1751.909497] env[62684]: value = "task-2052400" [ 1751.909497] env[62684]: _type = "Task" [ 1751.909497] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.918898] env[62684]: DEBUG oslo_vmware.api [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052400, 'name': ExtendVirtualDisk_Task} progress is 0%. 
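The 1048576 in "Extending root virtual disk to 1048576" is the requested capacity in KB: ExtendVirtualDisk_Task takes newCapacityKb, so a 1 GiB root disk comes out as 1 * 1024 * 1024 = 1,048,576 KB. A sketch of the conversion and the call, reusing the session, disk-manager and dc_ref placeholders from the earlier sketches:

    from oslo_utils import units

    root_gb = 1
    new_capacity_kb = root_gb * units.Mi  # units.Mi == 1048576, i.e. KB per GiB

    extend_task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task',
        session.vim.service_content.virtualDiskManager,
        name=('[datastore2] 6b1f0e69-3915-40dc-b4ec-93ab174f12b6/'
              '6b1f0e69-3915-40dc-b4ec-93ab174f12b6.vmdk'),
        datacenter=dc_ref, newCapacityKb=new_capacity_kb, eagerZero=False)
    session.wait_for_task(extend_task)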
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.991514] env[62684]: DEBUG oslo_vmware.api [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Task: {'id': task-2052395, 'name': PowerOnVM_Task, 'duration_secs': 1.194361} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1751.991927] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1751.992120] env[62684]: INFO nova.compute.manager [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Took 11.35 seconds to spawn the instance on the hypervisor. [ 1751.992303] env[62684]: DEBUG nova.compute.manager [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1751.993511] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e211da1a-67c5-4488-9802-6edf9b8636a4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.032957] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052399, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.167724] env[62684]: DEBUG nova.compute.manager [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1752.212519] env[62684]: DEBUG oslo_concurrency.lockutils [req-42f2b8b0-d391-459a-a366-33e5e84f2b11 req-a17c7fad-b3ef-4859-8a6e-bbff19c30843 service nova] Releasing lock "refresh_cache-43d28811-26e4-4016-9f82-98349d4a05b7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1752.256333] env[62684]: DEBUG nova.network.neutron [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Successfully created port: 617e9c54-b56e-4945-b890-de6be33b657b {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1752.273782] env[62684]: DEBUG oslo_vmware.api [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520c0f80-55aa-6ab9-b6bb-8169a096ce6f, 'name': SearchDatastore_Task, 'duration_secs': 0.019159} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.274030] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1752.274997] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 43d28811-26e4-4016-9f82-98349d4a05b7/43d28811-26e4-4016-9f82-98349d4a05b7.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1752.274997] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-861e08fe-c00b-4858-9c9b-f7e5ce175300 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.282080] env[62684]: DEBUG oslo_vmware.api [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Waiting for the task: (returnval){ [ 1752.282080] env[62684]: value = "task-2052401" [ 1752.282080] env[62684]: _type = "Task" [ 1752.282080] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.293742] env[62684]: DEBUG oslo_vmware.api [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Task: {'id': task-2052401, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.423199] env[62684]: DEBUG oslo_vmware.api [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052400, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070189} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.423508] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1752.424437] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7d0a213-cfc1-4ee9-87fa-548f36289fe4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.457555] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] 6b1f0e69-3915-40dc-b4ec-93ab174f12b6/6b1f0e69-3915-40dc-b4ec-93ab174f12b6.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1752.461609] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc551abd-e212-4cec-a3c8-cd3a85ad4475 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.481840] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Acquiring lock "dfe40a8c-61d6-4c60-afd3-0defb61c4308" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1752.481840] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Lock "dfe40a8c-61d6-4c60-afd3-0defb61c4308" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1752.485989] env[62684]: DEBUG oslo_vmware.api [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Waiting for the task: (returnval){ [ 1752.485989] env[62684]: value = "task-2052402" [ 1752.485989] env[62684]: _type = "Task" [ 1752.485989] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.498865] env[62684]: DEBUG oslo_vmware.api [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052402, 'name': ReconfigVM_Task} progress is 10%. 
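The ReconfigVM_Task above attaches the freshly copied and extended VMDK to the VM by adding a VirtualDisk device through a VirtualMachineConfigSpec. A heavily simplified sketch using the suds factory that oslo.vmware exposes; the controller key, unit number and flat backing are illustrative (the log's "type sparse" path uses a different backing class), and vm_ref is assumed to be the target VM's moref:

    cf = session.vim.client.factory  # suds object factory

    backing = cf.create('ns0:VirtualDiskFlatVer2BackingInfo')
    backing.diskMode = 'persistent'
    backing.thinProvisioned = False
    backing.fileName = ('[datastore2] 6b1f0e69-3915-40dc-b4ec-93ab174f12b6/'
                        '6b1f0e69-3915-40dc-b4ec-93ab174f12b6.vmdk')

    disk = cf.create('ns0:VirtualDisk')
    disk.backing = backing
    disk.controllerKey = 1000  # illustrative SCSI controller key
    disk.unitNumber = 0
    disk.key = -100

    change = cf.create('ns0:VirtualDeviceConfigSpec')
    change.operation = 'add'
    change.device = disk

    config_spec = cf.create('ns0:VirtualMachineConfigSpec')
    config_spec.deviceChange = [change]

    reconfig_task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                                       spec=config_spec)
    session.wait_for_task(reconfig_task)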
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.518943] env[62684]: INFO nova.compute.manager [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Took 24.60 seconds to build instance. [ 1752.536507] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052399, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.714105] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab007cd-3c2c-4df0-8dcf-b508dbebdfa8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.725375] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4735b7cd-03e6-4f1e-860d-634ded42cccd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.759619] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8ca476a-c11b-44cb-8f69-9ca6d5333660 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.771822] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0f31149-2323-403c-96ec-379e12cf7310 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.788648] env[62684]: DEBUG nova.compute.provider_tree [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1752.801719] env[62684]: DEBUG oslo_vmware.api [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Task: {'id': task-2052401, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.999070] env[62684]: DEBUG oslo_vmware.api [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052402, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.021564] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33ec88c1-591b-44ad-b635-4dcc91a424c7 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Lock "d532b5fa-90a3-4f25-8684-4eabaa432c86" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.134s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1753.036667] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052399, 'name': CreateVM_Task, 'duration_secs': 1.474732} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.036820] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1753.037631] env[62684]: DEBUG oslo_concurrency.lockutils [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1753.037902] env[62684]: DEBUG oslo_concurrency.lockutils [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1753.038198] env[62684]: DEBUG oslo_concurrency.lockutils [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1753.039418] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-652e0fdd-7398-44fe-a419-6e1137897782 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.046409] env[62684]: DEBUG oslo_vmware.api [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Waiting for the task: (returnval){ [ 1753.046409] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a23ef6-f1ac-45d7-18ad-03e9e3829849" [ 1753.046409] env[62684]: _type = "Task" [ 1753.046409] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.057261] env[62684]: DEBUG oslo_vmware.api [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a23ef6-f1ac-45d7-18ad-03e9e3829849, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.185593] env[62684]: DEBUG nova.compute.manager [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1753.212616] env[62684]: DEBUG nova.virt.hardware [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1753.212616] env[62684]: DEBUG nova.virt.hardware [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1753.213174] env[62684]: DEBUG nova.virt.hardware [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1753.213174] env[62684]: DEBUG nova.virt.hardware [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1753.213174] env[62684]: DEBUG nova.virt.hardware [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1753.213274] env[62684]: DEBUG nova.virt.hardware [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1753.213417] env[62684]: DEBUG nova.virt.hardware [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1753.213634] env[62684]: DEBUG nova.virt.hardware [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1753.213772] env[62684]: DEBUG nova.virt.hardware [None 
req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1753.213965] env[62684]: DEBUG nova.virt.hardware [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1753.214184] env[62684]: DEBUG nova.virt.hardware [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1753.215473] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e0cbdbc-995c-4e8e-a0e2-4442880e9e52 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.227679] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3142c54-5189-4138-b24f-2ad2b4f4309e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.294273] env[62684]: DEBUG nova.scheduler.client.report [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1753.301162] env[62684]: DEBUG oslo_vmware.api [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Task: {'id': task-2052401, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.644552} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.301666] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 43d28811-26e4-4016-9f82-98349d4a05b7/43d28811-26e4-4016-9f82-98349d4a05b7.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1753.301894] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1753.302161] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-82a5bea1-3bc3-4a99-b2e9-0d506b2ba9a6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.310500] env[62684]: DEBUG oslo_vmware.api [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Waiting for the task: (returnval){ [ 1753.310500] env[62684]: value = "task-2052403" [ 1753.310500] env[62684]: _type = "Task" [ 1753.310500] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.321380] env[62684]: DEBUG oslo_vmware.api [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Task: {'id': task-2052403, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.456309] env[62684]: DEBUG nova.compute.manager [req-e26d43bc-dc5d-4370-a397-ca5a5596fcd3 req-a2ec0bfc-7f90-425f-9615-d5557474bea4 service nova] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Received event network-vif-plugged-30471946-98e4-4413-acb5-8a8190d1dd82 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1753.456574] env[62684]: DEBUG oslo_concurrency.lockutils [req-e26d43bc-dc5d-4370-a397-ca5a5596fcd3 req-a2ec0bfc-7f90-425f-9615-d5557474bea4 service nova] Acquiring lock "73f27fc0-ebae-41c7-b292-14396f79a5a2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1753.456860] env[62684]: DEBUG oslo_concurrency.lockutils [req-e26d43bc-dc5d-4370-a397-ca5a5596fcd3 req-a2ec0bfc-7f90-425f-9615-d5557474bea4 service nova] Lock "73f27fc0-ebae-41c7-b292-14396f79a5a2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1753.456976] env[62684]: DEBUG oslo_concurrency.lockutils [req-e26d43bc-dc5d-4370-a397-ca5a5596fcd3 req-a2ec0bfc-7f90-425f-9615-d5557474bea4 service nova] Lock "73f27fc0-ebae-41c7-b292-14396f79a5a2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1753.457084] env[62684]: DEBUG nova.compute.manager [req-e26d43bc-dc5d-4370-a397-ca5a5596fcd3 req-a2ec0bfc-7f90-425f-9615-d5557474bea4 service nova] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] No waiting events found dispatching network-vif-plugged-30471946-98e4-4413-acb5-8a8190d1dd82 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1753.458044] env[62684]: WARNING nova.compute.manager [req-e26d43bc-dc5d-4370-a397-ca5a5596fcd3 req-a2ec0bfc-7f90-425f-9615-d5557474bea4 service nova] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Received unexpected event network-vif-plugged-30471946-98e4-4413-acb5-8a8190d1dd82 for instance with vm_state building and task_state spawning. [ 1753.458044] env[62684]: DEBUG nova.compute.manager [req-e26d43bc-dc5d-4370-a397-ca5a5596fcd3 req-a2ec0bfc-7f90-425f-9615-d5557474bea4 service nova] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Received event network-changed-30471946-98e4-4413-acb5-8a8190d1dd82 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1753.458044] env[62684]: DEBUG nova.compute.manager [req-e26d43bc-dc5d-4370-a397-ca5a5596fcd3 req-a2ec0bfc-7f90-425f-9615-d5557474bea4 service nova] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Refreshing instance network info cache due to event network-changed-30471946-98e4-4413-acb5-8a8190d1dd82. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1753.458044] env[62684]: DEBUG oslo_concurrency.lockutils [req-e26d43bc-dc5d-4370-a397-ca5a5596fcd3 req-a2ec0bfc-7f90-425f-9615-d5557474bea4 service nova] Acquiring lock "refresh_cache-73f27fc0-ebae-41c7-b292-14396f79a5a2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1753.458044] env[62684]: DEBUG oslo_concurrency.lockutils [req-e26d43bc-dc5d-4370-a397-ca5a5596fcd3 req-a2ec0bfc-7f90-425f-9615-d5557474bea4 service nova] Acquired lock "refresh_cache-73f27fc0-ebae-41c7-b292-14396f79a5a2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1753.458866] env[62684]: DEBUG nova.network.neutron [req-e26d43bc-dc5d-4370-a397-ca5a5596fcd3 req-a2ec0bfc-7f90-425f-9615-d5557474bea4 service nova] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Refreshing network info cache for port 30471946-98e4-4413-acb5-8a8190d1dd82 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1753.500083] env[62684]: DEBUG oslo_vmware.api [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052402, 'name': ReconfigVM_Task, 'duration_secs': 0.645024} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.500554] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Reconfigured VM instance instance-0000000b to attach disk [datastore2] 6b1f0e69-3915-40dc-b4ec-93ab174f12b6/6b1f0e69-3915-40dc-b4ec-93ab174f12b6.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1753.501019] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6846a0bd-e6f4-4e7e-bf4a-f5bc4c62c6b3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.509729] env[62684]: DEBUG oslo_vmware.api [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Waiting for the task: (returnval){ [ 1753.509729] env[62684]: value = "task-2052404" [ 1753.509729] env[62684]: _type = "Task" [ 1753.509729] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.519016] env[62684]: DEBUG oslo_vmware.api [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052404, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.524560] env[62684]: DEBUG nova.compute.manager [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1753.557505] env[62684]: DEBUG oslo_vmware.api [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a23ef6-f1ac-45d7-18ad-03e9e3829849, 'name': SearchDatastore_Task, 'duration_secs': 0.01255} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.557854] env[62684]: DEBUG oslo_concurrency.lockutils [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1753.558255] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1753.558531] env[62684]: DEBUG oslo_concurrency.lockutils [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1753.558684] env[62684]: DEBUG oslo_concurrency.lockutils [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1753.558762] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1753.559500] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-11823a25-63d3-410c-9be6-43ecd9fceada {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.568826] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1753.569054] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 
tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1753.569822] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f684320-3cdd-4738-9642-3d1214a20fe7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.576680] env[62684]: DEBUG oslo_vmware.api [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Waiting for the task: (returnval){ [ 1753.576680] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e2f820-bc2a-8df6-7e20-55157604eac7" [ 1753.576680] env[62684]: _type = "Task" [ 1753.576680] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.586920] env[62684]: DEBUG oslo_vmware.api [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e2f820-bc2a-8df6-7e20-55157604eac7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.802534] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.650s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1753.804396] env[62684]: DEBUG nova.compute.manager [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1753.806177] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.793s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1753.807911] env[62684]: INFO nova.compute.claims [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1753.820796] env[62684]: DEBUG oslo_vmware.api [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Task: {'id': task-2052403, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.119447} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.824022] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1753.824022] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-842f5a7c-064f-49c0-9e32-3cba597a4be2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.848267] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Reconfiguring VM instance instance-0000000e to attach disk [datastore2] 43d28811-26e4-4016-9f82-98349d4a05b7/43d28811-26e4-4016-9f82-98349d4a05b7.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1753.849489] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d37bfcb-ed3a-4e8e-a821-d50cf90466c0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.874260] env[62684]: DEBUG oslo_vmware.api [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Waiting for the task: (returnval){ [ 1753.874260] env[62684]: value = "task-2052405" [ 1753.874260] env[62684]: _type = "Task" [ 1753.874260] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.887708] env[62684]: DEBUG oslo_vmware.api [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Task: {'id': task-2052405, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.020517] env[62684]: DEBUG oslo_vmware.api [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052404, 'name': Rename_Task, 'duration_secs': 0.19735} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.020958] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1754.021133] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-30d0b42a-7093-4c65-b629-817c9f05d943 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.031402] env[62684]: DEBUG oslo_vmware.api [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Waiting for the task: (returnval){ [ 1754.031402] env[62684]: value = "task-2052406" [ 1754.031402] env[62684]: _type = "Task" [ 1754.031402] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.043898] env[62684]: DEBUG oslo_vmware.api [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052406, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.058756] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1754.090038] env[62684]: DEBUG oslo_vmware.api [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e2f820-bc2a-8df6-7e20-55157604eac7, 'name': SearchDatastore_Task, 'duration_secs': 0.011441} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.094206] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69d4358d-7e7d-4b9c-87b3-9a79a0df890d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.102009] env[62684]: DEBUG oslo_vmware.api [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Waiting for the task: (returnval){ [ 1754.102009] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]528e9951-8bcf-acfd-a657-a0c6a780d4a3" [ 1754.102009] env[62684]: _type = "Task" [ 1754.102009] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.113687] env[62684]: DEBUG oslo_vmware.api [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]528e9951-8bcf-acfd-a657-a0c6a780d4a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.253278] env[62684]: DEBUG nova.network.neutron [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Successfully updated port: 617e9c54-b56e-4945-b890-de6be33b657b {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1754.312519] env[62684]: DEBUG nova.compute.utils [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1754.320455] env[62684]: DEBUG nova.compute.manager [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1754.320657] env[62684]: DEBUG nova.network.neutron [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1754.388229] env[62684]: DEBUG oslo_vmware.api [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Task: {'id': task-2052405, 'name': ReconfigVM_Task, 'duration_secs': 0.479188} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.388711] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Reconfigured VM instance instance-0000000e to attach disk [datastore2] 43d28811-26e4-4016-9f82-98349d4a05b7/43d28811-26e4-4016-9f82-98349d4a05b7.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1754.389716] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-73fff320-a95c-4b6d-be3d-ed28316eef24 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.398787] env[62684]: DEBUG oslo_vmware.api [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Waiting for the task: (returnval){ [ 1754.398787] env[62684]: value = "task-2052407" [ 1754.398787] env[62684]: _type = "Task" [ 1754.398787] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.412914] env[62684]: DEBUG oslo_vmware.api [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Task: {'id': task-2052407, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.414340] env[62684]: DEBUG nova.network.neutron [req-e26d43bc-dc5d-4370-a397-ca5a5596fcd3 req-a2ec0bfc-7f90-425f-9615-d5557474bea4 service nova] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Updated VIF entry in instance network info cache for port 30471946-98e4-4413-acb5-8a8190d1dd82. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1754.414812] env[62684]: DEBUG nova.network.neutron [req-e26d43bc-dc5d-4370-a397-ca5a5596fcd3 req-a2ec0bfc-7f90-425f-9615-d5557474bea4 service nova] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Updating instance_info_cache with network_info: [{"id": "30471946-98e4-4413-acb5-8a8190d1dd82", "address": "fa:16:3e:85:31:4c", "network": {"id": "7982dcb9-e661-4690-9931-bf412f4a564e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1278071472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72ac36eda47d4c51a4b421c764d0404d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9bb629cd-6d0f-4bed-965c-bd04a2f3ec49", "external-id": "nsx-vlan-transportzone-848", "segmentation_id": 848, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30471946-98", "ovs_interfaceid": "30471946-98e4-4413-acb5-8a8190d1dd82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1754.422396] env[62684]: DEBUG nova.policy [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b481b4a788ec45e18a9cfccae2f0ac26', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c873948cf2a646008a7fffc544a6a8fd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1754.544241] env[62684]: DEBUG oslo_vmware.api [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052406, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.616736] env[62684]: DEBUG oslo_vmware.api [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]528e9951-8bcf-acfd-a657-a0c6a780d4a3, 'name': SearchDatastore_Task, 'duration_secs': 0.024407} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.617282] env[62684]: DEBUG oslo_concurrency.lockutils [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1754.617700] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 73f27fc0-ebae-41c7-b292-14396f79a5a2/73f27fc0-ebae-41c7-b292-14396f79a5a2.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1754.618159] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0e4d394c-44a2-4f31-9d90-d54bc4a53022 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.629019] env[62684]: DEBUG oslo_vmware.api [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Waiting for the task: (returnval){ [ 1754.629019] env[62684]: value = "task-2052408" [ 1754.629019] env[62684]: _type = "Task" [ 1754.629019] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.638324] env[62684]: DEBUG oslo_vmware.api [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052408, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.757468] env[62684]: DEBUG oslo_concurrency.lockutils [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "refresh_cache-dcb0a5b2-379e-44ff-a9b0-be615943c94e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1754.757468] env[62684]: DEBUG oslo_concurrency.lockutils [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquired lock "refresh_cache-dcb0a5b2-379e-44ff-a9b0-be615943c94e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1754.757468] env[62684]: DEBUG nova.network.neutron [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1754.824593] env[62684]: DEBUG nova.compute.manager [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1754.912211] env[62684]: DEBUG oslo_vmware.api [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Task: {'id': task-2052407, 'name': Rename_Task, 'duration_secs': 0.210992} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.916795] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1754.917662] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9a2dd751-57f4-4c44-a77d-a1e327c5ded5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.923372] env[62684]: DEBUG oslo_concurrency.lockutils [req-e26d43bc-dc5d-4370-a397-ca5a5596fcd3 req-a2ec0bfc-7f90-425f-9615-d5557474bea4 service nova] Releasing lock "refresh_cache-73f27fc0-ebae-41c7-b292-14396f79a5a2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1754.935020] env[62684]: DEBUG oslo_vmware.api [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Waiting for the task: (returnval){ [ 1754.935020] env[62684]: value = "task-2052409" [ 1754.935020] env[62684]: _type = "Task" [ 1754.935020] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.951516] env[62684]: DEBUG oslo_vmware.api [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Task: {'id': task-2052409, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.051085] env[62684]: DEBUG oslo_vmware.api [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052406, 'name': PowerOnVM_Task, 'duration_secs': 0.610515} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.051977] env[62684]: DEBUG nova.network.neutron [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Successfully created port: c0047526-de96-4c14-8230-e69c53c790af {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1755.054259] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1755.054510] env[62684]: INFO nova.compute.manager [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Took 11.79 seconds to spawn the instance on the hypervisor. [ 1755.054719] env[62684]: DEBUG nova.compute.manager [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1755.055602] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb24dbd5-ad8b-4071-bf8c-8a70727d9165 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.141932] env[62684]: DEBUG oslo_vmware.api [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052408, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.343028] env[62684]: INFO nova.virt.block_device [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Booting with volume 34523d13-ed90-416e-a19a-57c837136d21 at /dev/sda [ 1755.364980] env[62684]: DEBUG nova.network.neutron [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1755.420345] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-07be3dc5-2a58-4aaf-8a1a-d14736c8011c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.434981] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3e42f09-6c4b-4c3e-9e3f-95cdda114844 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.454117] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea089be8-16b4-4441-8eba-af41f2bd1082 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.471266] env[62684]: DEBUG oslo_vmware.api [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Task: {'id': task-2052409, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.473094] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31e60ee6-71b0-45e1-8451-9269c8b64d61 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.489468] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9672f101-fb1c-4e37-9f9f-29e917f9ad2c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.528522] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98cea56d-1866-4399-8e2e-c979b8d5a1b3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.535773] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd7cc37-92a3-4081-94ea-a33317c6be20 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.551807] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49960286-5955-4637-9e3e-f1645245ce4a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.577997] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0c826d3-4294-4bb3-88b1-f3bc6acb002a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.592643] env[62684]: DEBUG nova.compute.provider_tree [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1755.597652] env[62684]: INFO nova.compute.manager [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Took 27.67 seconds to build instance. [ 1755.605972] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1f4f2f0-f5b8-44c4-bd0c-6fe3d58ee334 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.626587] env[62684]: DEBUG nova.virt.block_device [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Updating existing volume attachment record: f2877eb4-dd2d-4df8-91a2-806984307811 {{(pid=62684) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1755.631219] env[62684]: DEBUG oslo_vmware.rw_handles [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525a881a-c1da-11de-7302-0745902fcf95/disk-0.vmdk. 
{{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1755.631749] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b22f96-fcc1-4950-b03f-4da77764eb3b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.643143] env[62684]: DEBUG oslo_vmware.rw_handles [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525a881a-c1da-11de-7302-0745902fcf95/disk-0.vmdk is in state: ready. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1755.643143] env[62684]: ERROR oslo_vmware.rw_handles [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525a881a-c1da-11de-7302-0745902fcf95/disk-0.vmdk due to incomplete transfer. [ 1755.649354] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-0e661c8e-0d9b-4bb1-a459-4d75702e2dc0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.649354] env[62684]: DEBUG oslo_vmware.api [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052408, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.794331} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.649354] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 73f27fc0-ebae-41c7-b292-14396f79a5a2/73f27fc0-ebae-41c7-b292-14396f79a5a2.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1755.649354] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1755.649705] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7e0840de-207e-4e5c-8cf8-c77568557b85 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.657036] env[62684]: DEBUG oslo_vmware.rw_handles [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525a881a-c1da-11de-7302-0745902fcf95/disk-0.vmdk. 
{{(pid=62684) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1755.657036] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Uploaded image cb72481b-e414-4c6f-a209-0ec1dbe35df0 to the Glance image server {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1755.659031] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Destroying the VM {{(pid=62684) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1755.661294] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f755bb1d-6e97-42f6-a9ad-d98c67895182 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.663369] env[62684]: DEBUG oslo_vmware.api [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Waiting for the task: (returnval){ [ 1755.663369] env[62684]: value = "task-2052410" [ 1755.663369] env[62684]: _type = "Task" [ 1755.663369] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.670184] env[62684]: DEBUG oslo_vmware.api [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1755.670184] env[62684]: value = "task-2052411" [ 1755.670184] env[62684]: _type = "Task" [ 1755.670184] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.679057] env[62684]: DEBUG oslo_vmware.api [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052410, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.684842] env[62684]: DEBUG oslo_vmware.api [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052411, 'name': Destroy_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.826274] env[62684]: DEBUG nova.network.neutron [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Updating instance_info_cache with network_info: [{"id": "617e9c54-b56e-4945-b890-de6be33b657b", "address": "fa:16:3e:11:51:36", "network": {"id": "bca0ee43-bbb1-483b-9d82-56955369f9b7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1592250106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aef5d7061c834332b9f9c5c75596bf08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bfa7abe-7e46-4d8f-b50a-4d0c4509e4dc", "external-id": "nsx-vlan-transportzone-951", "segmentation_id": 951, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap617e9c54-b5", "ovs_interfaceid": "617e9c54-b56e-4945-b890-de6be33b657b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1755.965635] env[62684]: DEBUG oslo_vmware.api [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Task: {'id': task-2052409, 'name': PowerOnVM_Task, 'duration_secs': 0.851483} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.966009] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1755.966203] env[62684]: INFO nova.compute.manager [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Took 10.21 seconds to spawn the instance on the hypervisor. 
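The wait_for_task / _poll_task pairs above (task-2052410, task-2052411, and the later CreateVM/ReconfigVM/PowerOnVM tasks) all reflect the same poll-until-terminal-state loop: submit a vCenter task, poll its progress, and log the duration once it completes. A minimal sketch of that pattern, assuming a hypothetical get_task_state() helper rather than the real oslo.vmware session API:

import time

# Hypothetical helper: returns (state, progress) for a vCenter task,
# e.g. ("running", 33), ("success", 100) or ("error", 0).
def get_task_state(task_ref):
    raise NotImplementedError("stand-in for a real vSphere API call")

def wait_for_task(task_ref, interval=0.5, timeout=300.0):
    """Poll a task reference until it reaches a terminal state.

    Mirrors the DEBUG lines above: each poll reports progress, and a
    completed poll reports the elapsed duration before returning.
    """
    start = time.monotonic()
    while True:
        state, progress = get_task_state(task_ref)
        print(f"Task {task_ref}: state={state} progress={progress}%")
        if state == "success":
            return time.monotonic() - start   # the 'duration_secs' seen in the log
        if state == "error":
            raise RuntimeError(f"Task {task_ref} failed")
        if time.monotonic() - start > timeout:
            raise TimeoutError(f"Task {task_ref} did not finish in {timeout}s")
        time.sleep(interval)

This is only an illustration of the polling shape visible in the log; the actual driver delegates the loop to oslo.vmware, which adds its own backoff and error translation.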
[ 1755.966559] env[62684]: DEBUG nova.compute.manager [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1755.967398] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf124283-05f8-4bd2-b3e1-2242b531a2a4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.985434] env[62684]: DEBUG nova.compute.manager [None req-effcca01-c294-49f8-80ce-bfecd0a27f08 tempest-ServerDiagnosticsV248Test-289604052 tempest-ServerDiagnosticsV248Test-289604052-project-admin] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1755.987536] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8d1031f-38cb-4509-8e04-7526cdd7b726 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.996015] env[62684]: INFO nova.compute.manager [None req-effcca01-c294-49f8-80ce-bfecd0a27f08 tempest-ServerDiagnosticsV248Test-289604052 tempest-ServerDiagnosticsV248Test-289604052-project-admin] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Retrieving diagnostics [ 1755.996935] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea3ed7fe-9d09-43c7-86f2-66ff9f676b57 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.101697] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bbea26bc-6bc9-4c77-8ec1-5ddc391b7149 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Lock "6b1f0e69-3915-40dc-b4ec-93ab174f12b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.192s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1756.102749] env[62684]: DEBUG nova.scheduler.client.report [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1756.176837] env[62684]: DEBUG oslo_vmware.api [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052410, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.241912} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.177255] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1756.179169] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c1655a0-a7da-44d1-a74e-a0ceadad188b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.185060] env[62684]: DEBUG oslo_vmware.api [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052411, 'name': Destroy_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.208907] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] 73f27fc0-ebae-41c7-b292-14396f79a5a2/73f27fc0-ebae-41c7-b292-14396f79a5a2.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1756.209336] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef9289b3-6afa-4890-b6b8-80e42c4b6cf0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.235608] env[62684]: DEBUG oslo_vmware.api [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Waiting for the task: (returnval){ [ 1756.235608] env[62684]: value = "task-2052412" [ 1756.235608] env[62684]: _type = "Task" [ 1756.235608] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.245796] env[62684]: DEBUG oslo_vmware.api [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052412, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.329711] env[62684]: DEBUG oslo_concurrency.lockutils [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Releasing lock "refresh_cache-dcb0a5b2-379e-44ff-a9b0-be615943c94e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1756.329711] env[62684]: DEBUG nova.compute.manager [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Instance network_info: |[{"id": "617e9c54-b56e-4945-b890-de6be33b657b", "address": "fa:16:3e:11:51:36", "network": {"id": "bca0ee43-bbb1-483b-9d82-56955369f9b7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1592250106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aef5d7061c834332b9f9c5c75596bf08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bfa7abe-7e46-4d8f-b50a-4d0c4509e4dc", "external-id": "nsx-vlan-transportzone-951", "segmentation_id": 951, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap617e9c54-b5", "ovs_interfaceid": "617e9c54-b56e-4945-b890-de6be33b657b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1756.329836] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:51:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bfa7abe-7e46-4d8f-b50a-4d0c4509e4dc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '617e9c54-b56e-4945-b890-de6be33b657b', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1756.338044] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Creating folder: Project (aef5d7061c834332b9f9c5c75596bf08). Parent ref: group-v421118. 
{{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1756.338397] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-63e9337b-139b-4546-aee4-b45b9a97fada {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.351968] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Created folder: Project (aef5d7061c834332b9f9c5c75596bf08) in parent group-v421118. [ 1756.352212] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Creating folder: Instances. Parent ref: group-v421169. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1756.352474] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f60acce9-595e-4bff-ab41-5ddd98190368 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.367343] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Created folder: Instances in parent group-v421169. [ 1756.367601] env[62684]: DEBUG oslo.service.loopingcall [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1756.367802] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1756.368037] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b871d20d-fc69-424f-992a-15596073f032 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.391603] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1756.391603] env[62684]: value = "task-2052415" [ 1756.391603] env[62684]: _type = "Task" [ 1756.391603] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.405324] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052415, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.491395] env[62684]: INFO nova.compute.manager [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Took 23.96 seconds to build instance. 
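The recurring "Acquiring lock … by …", "acquired … waited 0.000s" and "released … held 29.192s" lines in this section come from oslo.concurrency's lockutils wrappers, which time how long a caller waited for and then held a named lock. A minimal sketch of that usage, assuming only the public lockutils.lock() context manager (the function and lock name below are illustrative, not the actual Nova call sites):

import time
from oslo_concurrency import lockutils

def refresh_instance_cache(instance_uuid):
    """Serialize per-instance cache refreshes, in the spirit of the
    "refresh_cache-<uuid>" locks seen above."""
    wait_start = time.monotonic()
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        hold_start = time.monotonic()
        print(f"lock acquired after waiting {hold_start - wait_start:.3f}s")
        # ... rebuild the network info cache for this instance ...
    print(f"lock held {time.monotonic() - hold_start:.3f}s")

The long "held 29.192s" entries correspond to coarse locks wrapping an entire build-and-run of an instance, while the sub-millisecond waits indicate uncontended locks.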
[ 1756.585596] env[62684]: DEBUG nova.compute.manager [req-37952bd6-66fe-465f-ac01-8185126ccb40 req-f1cf7f98-7211-4a23-8dc7-de0040080acf service nova] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Received event network-vif-plugged-617e9c54-b56e-4945-b890-de6be33b657b {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1756.585889] env[62684]: DEBUG oslo_concurrency.lockutils [req-37952bd6-66fe-465f-ac01-8185126ccb40 req-f1cf7f98-7211-4a23-8dc7-de0040080acf service nova] Acquiring lock "dcb0a5b2-379e-44ff-a9b0-be615943c94e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1756.586425] env[62684]: DEBUG oslo_concurrency.lockutils [req-37952bd6-66fe-465f-ac01-8185126ccb40 req-f1cf7f98-7211-4a23-8dc7-de0040080acf service nova] Lock "dcb0a5b2-379e-44ff-a9b0-be615943c94e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1756.586792] env[62684]: DEBUG oslo_concurrency.lockutils [req-37952bd6-66fe-465f-ac01-8185126ccb40 req-f1cf7f98-7211-4a23-8dc7-de0040080acf service nova] Lock "dcb0a5b2-379e-44ff-a9b0-be615943c94e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1756.587009] env[62684]: DEBUG nova.compute.manager [req-37952bd6-66fe-465f-ac01-8185126ccb40 req-f1cf7f98-7211-4a23-8dc7-de0040080acf service nova] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] No waiting events found dispatching network-vif-plugged-617e9c54-b56e-4945-b890-de6be33b657b {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1756.587280] env[62684]: WARNING nova.compute.manager [req-37952bd6-66fe-465f-ac01-8185126ccb40 req-f1cf7f98-7211-4a23-8dc7-de0040080acf service nova] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Received unexpected event network-vif-plugged-617e9c54-b56e-4945-b890-de6be33b657b for instance with vm_state building and task_state spawning. [ 1756.588074] env[62684]: DEBUG nova.compute.manager [req-37952bd6-66fe-465f-ac01-8185126ccb40 req-f1cf7f98-7211-4a23-8dc7-de0040080acf service nova] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Received event network-changed-617e9c54-b56e-4945-b890-de6be33b657b {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1756.588357] env[62684]: DEBUG nova.compute.manager [req-37952bd6-66fe-465f-ac01-8185126ccb40 req-f1cf7f98-7211-4a23-8dc7-de0040080acf service nova] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Refreshing instance network info cache due to event network-changed-617e9c54-b56e-4945-b890-de6be33b657b. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1756.588644] env[62684]: DEBUG oslo_concurrency.lockutils [req-37952bd6-66fe-465f-ac01-8185126ccb40 req-f1cf7f98-7211-4a23-8dc7-de0040080acf service nova] Acquiring lock "refresh_cache-dcb0a5b2-379e-44ff-a9b0-be615943c94e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1756.588914] env[62684]: DEBUG oslo_concurrency.lockutils [req-37952bd6-66fe-465f-ac01-8185126ccb40 req-f1cf7f98-7211-4a23-8dc7-de0040080acf service nova] Acquired lock "refresh_cache-dcb0a5b2-379e-44ff-a9b0-be615943c94e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1756.590185] env[62684]: DEBUG nova.network.neutron [req-37952bd6-66fe-465f-ac01-8185126ccb40 req-f1cf7f98-7211-4a23-8dc7-de0040080acf service nova] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Refreshing network info cache for port 617e9c54-b56e-4945-b890-de6be33b657b {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1756.606991] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.801s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1756.610063] env[62684]: DEBUG nova.compute.manager [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1756.611869] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.708s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1756.614550] env[62684]: INFO nova.compute.claims [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1756.615275] env[62684]: DEBUG nova.compute.manager [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1756.683898] env[62684]: DEBUG oslo_vmware.api [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052411, 'name': Destroy_Task, 'duration_secs': 0.839717} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.684155] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Destroyed the VM [ 1756.684546] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Deleting Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1756.685061] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-786ef63e-8f58-4642-98e3-2405a7804580 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.693827] env[62684]: DEBUG oslo_vmware.api [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1756.693827] env[62684]: value = "task-2052416" [ 1756.693827] env[62684]: _type = "Task" [ 1756.693827] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.704779] env[62684]: DEBUG oslo_vmware.api [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052416, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.753924] env[62684]: DEBUG oslo_vmware.api [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052412, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.904127] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052415, 'name': CreateVM_Task, 'duration_secs': 0.412861} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.904345] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1756.905280] env[62684]: DEBUG oslo_concurrency.lockutils [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1756.905495] env[62684]: DEBUG oslo_concurrency.lockutils [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1756.906539] env[62684]: DEBUG oslo_concurrency.lockutils [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1756.906539] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-220e3081-3c74-44a4-b99d-747c4a7a0b12 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.912637] env[62684]: DEBUG oslo_vmware.api [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1756.912637] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d39fff-6251-445f-ad08-476512ecdfe4" [ 1756.912637] env[62684]: _type = "Task" [ 1756.912637] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.921748] env[62684]: DEBUG oslo_vmware.api [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d39fff-6251-445f-ad08-476512ecdfe4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.992058] env[62684]: DEBUG nova.network.neutron [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Successfully updated port: c0047526-de96-4c14-8230-e69c53c790af {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1756.993556] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7690626d-7c80-4a7e-a327-fb4c30f5b3a6 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Lock "43d28811-26e4-4016-9f82-98349d4a05b7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.835s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1757.110989] env[62684]: DEBUG nova.compute.manager [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1757.110989] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d814832-7af4-418d-aae4-20520f211cca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.120099] env[62684]: DEBUG nova.compute.utils [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1757.136020] env[62684]: DEBUG nova.compute.manager [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1757.136020] env[62684]: DEBUG nova.network.neutron [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1757.167479] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1757.207847] env[62684]: DEBUG oslo_vmware.api [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052416, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.250390] env[62684]: DEBUG oslo_vmware.api [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052412, 'name': ReconfigVM_Task, 'duration_secs': 0.597092} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.250835] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Reconfigured VM instance instance-0000000d to attach disk [datastore1] 73f27fc0-ebae-41c7-b292-14396f79a5a2/73f27fc0-ebae-41c7-b292-14396f79a5a2.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1757.251640] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-90dc0206-c5d4-405c-8bda-44d9eccc72bb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.259608] env[62684]: DEBUG oslo_vmware.api [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Waiting for the task: (returnval){ [ 1757.259608] env[62684]: value = "task-2052417" [ 1757.259608] env[62684]: _type = "Task" [ 1757.259608] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.270323] env[62684]: DEBUG oslo_vmware.api [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052417, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.367960] env[62684]: DEBUG nova.network.neutron [req-37952bd6-66fe-465f-ac01-8185126ccb40 req-f1cf7f98-7211-4a23-8dc7-de0040080acf service nova] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Updated VIF entry in instance network info cache for port 617e9c54-b56e-4945-b890-de6be33b657b. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1757.368377] env[62684]: DEBUG nova.network.neutron [req-37952bd6-66fe-465f-ac01-8185126ccb40 req-f1cf7f98-7211-4a23-8dc7-de0040080acf service nova] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Updating instance_info_cache with network_info: [{"id": "617e9c54-b56e-4945-b890-de6be33b657b", "address": "fa:16:3e:11:51:36", "network": {"id": "bca0ee43-bbb1-483b-9d82-56955369f9b7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1592250106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aef5d7061c834332b9f9c5c75596bf08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bfa7abe-7e46-4d8f-b50a-4d0c4509e4dc", "external-id": "nsx-vlan-transportzone-951", "segmentation_id": 951, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap617e9c54-b5", "ovs_interfaceid": "617e9c54-b56e-4945-b890-de6be33b657b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1757.427463] env[62684]: DEBUG oslo_vmware.api [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d39fff-6251-445f-ad08-476512ecdfe4, 'name': SearchDatastore_Task, 'duration_secs': 0.012311} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.427463] env[62684]: DEBUG oslo_concurrency.lockutils [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1757.428966] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1757.428966] env[62684]: DEBUG oslo_concurrency.lockutils [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1757.429284] env[62684]: DEBUG oslo_concurrency.lockutils [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1757.429575] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1757.430728] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-55295d98-9a4c-4a4c-a69c-7450c1637c55 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.444657] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1757.444941] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1757.445736] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9286965-9995-4701-a327-f9da004551bc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.453308] env[62684]: DEBUG oslo_vmware.api [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1757.453308] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524047eb-b682-6240-d369-d725c77a42cc" [ 1757.453308] env[62684]: _type = "Task" [ 1757.453308] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.463477] env[62684]: DEBUG oslo_vmware.api [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524047eb-b682-6240-d369-d725c77a42cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.489161] env[62684]: DEBUG nova.policy [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dce421dcd2704644b9ea85753cd9986e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '84eadd714b9646489a5093086fb06aef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1757.494788] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Acquiring lock "refresh_cache-5bc73032-45f9-4b5c-a4ea-e07c48e4f82b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1757.494965] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Acquired lock "refresh_cache-5bc73032-45f9-4b5c-a4ea-e07c48e4f82b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1757.495131] env[62684]: DEBUG nova.network.neutron [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1757.497094] env[62684]: DEBUG nova.compute.manager [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1757.634017] env[62684]: DEBUG nova.compute.manager [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1757.642691] env[62684]: INFO nova.compute.manager [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] instance snapshotting [ 1757.647525] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07cc72d3-3224-40cb-9b9b-bc411aa079a1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.672500] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-900ee802-fc77-478d-84f6-5f701911df24 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.707363] env[62684]: DEBUG oslo_vmware.api [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052416, 'name': RemoveSnapshot_Task, 'duration_secs': 0.56414} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.707655] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Deleted Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1757.707886] env[62684]: INFO nova.compute.manager [None req-2e8ed18e-79d4-4465-aa7b-43dd7830fbb5 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Took 15.40 seconds to snapshot the instance on the hypervisor. [ 1757.778279] env[62684]: DEBUG oslo_vmware.api [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052417, 'name': Rename_Task, 'duration_secs': 0.197569} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.778279] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1757.778279] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a16398ea-2da6-4832-a474-58cf66cb119a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.784812] env[62684]: DEBUG oslo_vmware.api [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Waiting for the task: (returnval){ [ 1757.784812] env[62684]: value = "task-2052418" [ 1757.784812] env[62684]: _type = "Task" [ 1757.784812] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.794839] env[62684]: DEBUG oslo_vmware.api [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052418, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.807281] env[62684]: DEBUG nova.compute.manager [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1757.807841] env[62684]: DEBUG nova.virt.hardware [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1757.808082] env[62684]: DEBUG nova.virt.hardware [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1757.808247] env[62684]: DEBUG nova.virt.hardware [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1757.808434] env[62684]: DEBUG nova.virt.hardware [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1757.809323] env[62684]: DEBUG nova.virt.hardware [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1757.810318] env[62684]: DEBUG nova.virt.hardware [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1757.810318] env[62684]: DEBUG nova.virt.hardware [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1757.810318] env[62684]: DEBUG nova.virt.hardware [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1757.810569] env[62684]: DEBUG nova.virt.hardware [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Got 1 
possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1757.811366] env[62684]: DEBUG nova.virt.hardware [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1757.811366] env[62684]: DEBUG nova.virt.hardware [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1757.811732] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b4dda2-89c0-4121-bd09-93dd5bd6c105 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.823822] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b3f143c-8a4c-44f1-9a2d-fe8362df7a2b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.871713] env[62684]: DEBUG oslo_concurrency.lockutils [req-37952bd6-66fe-465f-ac01-8185126ccb40 req-f1cf7f98-7211-4a23-8dc7-de0040080acf service nova] Releasing lock "refresh_cache-dcb0a5b2-379e-44ff-a9b0-be615943c94e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1757.971382] env[62684]: DEBUG oslo_vmware.api [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524047eb-b682-6240-d369-d725c77a42cc, 'name': SearchDatastore_Task, 'duration_secs': 0.031102} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.972243] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-babf8b0e-8d32-409e-a190-6555d8e92d1e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.981460] env[62684]: DEBUG oslo_vmware.api [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1757.981460] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522ad4a0-341e-c5b9-ac3e-2503fa0d77f3" [ 1757.981460] env[62684]: _type = "Task" [ 1757.981460] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.991237] env[62684]: DEBUG oslo_vmware.api [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522ad4a0-341e-c5b9-ac3e-2503fa0d77f3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.020183] env[62684]: DEBUG oslo_concurrency.lockutils [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1758.042246] env[62684]: DEBUG nova.network.neutron [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1758.149090] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d72434e-e9b5-4923-bb96-c9f240102a6b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.158979] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3f3abe-f668-4059-841f-588d389990e4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.192278] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Creating Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1758.196449] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1aacd239-9267-48b4-8f4d-05c28f6bd971 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.199420] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb765f95-e211-4d79-9059-535c6daebb9a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.942073] env[62684]: DEBUG nova.network.neutron [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Successfully created port: d94a0e39-627d-4191-9011-76da63ed1d8f {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1758.948687] env[62684]: DEBUG nova.compute.manager [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1758.952013] env[62684]: DEBUG nova.compute.manager [req-1023fed2-3b87-4f2f-9fc3-4af1c99d8ce0 req-fc8e1a02-c945-456a-bb00-0e2f92207d5e service nova] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Received event network-changed-f8d1bd12-b449-41ef-bd95-755f619b639a {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1758.952224] env[62684]: DEBUG nova.compute.manager [req-1023fed2-3b87-4f2f-9fc3-4af1c99d8ce0 req-fc8e1a02-c945-456a-bb00-0e2f92207d5e service nova] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Refreshing instance network info cache due to event network-changed-f8d1bd12-b449-41ef-bd95-755f619b639a. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1758.952442] env[62684]: DEBUG oslo_concurrency.lockutils [req-1023fed2-3b87-4f2f-9fc3-4af1c99d8ce0 req-fc8e1a02-c945-456a-bb00-0e2f92207d5e service nova] Acquiring lock "refresh_cache-d532b5fa-90a3-4f25-8684-4eabaa432c86" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1758.952585] env[62684]: DEBUG oslo_concurrency.lockutils [req-1023fed2-3b87-4f2f-9fc3-4af1c99d8ce0 req-fc8e1a02-c945-456a-bb00-0e2f92207d5e service nova] Acquired lock "refresh_cache-d532b5fa-90a3-4f25-8684-4eabaa432c86" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1758.952770] env[62684]: DEBUG nova.network.neutron [req-1023fed2-3b87-4f2f-9fc3-4af1c99d8ce0 req-fc8e1a02-c945-456a-bb00-0e2f92207d5e service nova] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Refreshing network info cache for port f8d1bd12-b449-41ef-bd95-755f619b639a {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1758.967075] env[62684]: DEBUG oslo_vmware.api [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Waiting for the task: (returnval){ [ 1758.967075] env[62684]: value = "task-2052419" [ 1758.967075] env[62684]: _type = "Task" [ 1758.967075] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.975680] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8316355e-1a40-4d4b-baaa-42db2fd10172 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.988725] env[62684]: DEBUG oslo_vmware.api [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052418, 'name': PowerOnVM_Task, 'duration_secs': 1.034869} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.988966] env[62684]: DEBUG oslo_vmware.api [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522ad4a0-341e-c5b9-ac3e-2503fa0d77f3, 'name': SearchDatastore_Task, 'duration_secs': 0.016496} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.990073] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1758.990293] env[62684]: INFO nova.compute.manager [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Took 10.56 seconds to spawn the instance on the hypervisor. [ 1758.990551] env[62684]: DEBUG nova.compute.manager [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1758.990892] env[62684]: DEBUG oslo_concurrency.lockutils [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1758.991082] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] dcb0a5b2-379e-44ff-a9b0-be615943c94e/dcb0a5b2-379e-44ff-a9b0-be615943c94e.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1758.992205] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a9ed997-c0aa-45bf-b3ce-c7fb962dd7ee {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.995251] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9675903-7c55-4881-99a8-d10b7b151afe {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.010902] env[62684]: DEBUG nova.virt.hardware [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1759.011754] env[62684]: DEBUG nova.virt.hardware [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1759.011754] env[62684]: DEBUG nova.virt.hardware [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1759.011754] env[62684]: DEBUG nova.virt.hardware [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1759.011754] env[62684]: DEBUG nova.virt.hardware [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1759.012067] env[62684]: DEBUG nova.virt.hardware [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1759.012067] env[62684]: DEBUG nova.virt.hardware [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1759.012435] env[62684]: DEBUG nova.virt.hardware [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1759.012638] env[62684]: DEBUG nova.virt.hardware [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1759.012806] env[62684]: DEBUG nova.virt.hardware [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1759.012984] env[62684]: DEBUG nova.virt.hardware 
[None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1759.013865] env[62684]: DEBUG oslo_vmware.api [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052419, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.014326] env[62684]: DEBUG nova.compute.provider_tree [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1759.018034] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82897393-7e53-4ffb-9dd2-43d41601b6dc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.019458] env[62684]: DEBUG nova.network.neutron [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Updating instance_info_cache with network_info: [{"id": "c0047526-de96-4c14-8230-e69c53c790af", "address": "fa:16:3e:80:f4:14", "network": {"id": "5c74d1ef-f900-4b04-bac5-bfa28ffd8537", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-2074219577-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c873948cf2a646008a7fffc544a6a8fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ef746c57-cd18-4883-a0e9-c52937aaf41d", "external-id": "nsx-vlan-transportzone-863", "segmentation_id": 863, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0047526-de", "ovs_interfaceid": "c0047526-de96-4c14-8230-e69c53c790af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1759.032878] env[62684]: DEBUG oslo_vmware.api [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1759.032878] env[62684]: value = "task-2052420" [ 1759.032878] env[62684]: _type = "Task" [ 1759.032878] env[62684]: } to complete. 
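The nova.virt.hardware entries just above walk through CPU topology selection for the m1.nano flavor: with no flavor or image constraints the per-dimension limits default to 65536 sockets/cores/threads, and for 1 vCPU the only factorization is sockets=1, cores=1, threads=1. A small illustrative enumeration of that step (not the actual nova.virt.hardware code): list every (sockets, cores, threads) triple whose product equals the vCPU count and respects the maxima.

from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate topologies whose sockets * cores * threads == vcpus.

    Illustrative reconstruction of the selection logged above
    (1 vCPU -> [VirtCPUTopology(sockets=1, cores=1, threads=1)]).
    """
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies


print(possible_topologies(1))   # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
print(possible_topologies(4))   # (1,1,4), (1,2,2), (1,4,1), (2,1,2), (2,2,1), (4,1,1)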
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.034359] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d44d81-1394-4e26-ac16-372c4f0bf6b4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.056298] env[62684]: DEBUG oslo_vmware.api [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052420, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.468419] env[62684]: DEBUG oslo_concurrency.lockutils [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Acquiring lock "e1540aa6-12a4-4cff-a444-d47ee66c78d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.468599] env[62684]: DEBUG oslo_concurrency.lockutils [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Lock "e1540aa6-12a4-4cff-a444-d47ee66c78d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1759.468819] env[62684]: DEBUG oslo_concurrency.lockutils [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Acquiring lock "e1540aa6-12a4-4cff-a444-d47ee66c78d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.469156] env[62684]: DEBUG oslo_concurrency.lockutils [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Lock "e1540aa6-12a4-4cff-a444-d47ee66c78d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1759.469370] env[62684]: DEBUG oslo_concurrency.lockutils [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Lock "e1540aa6-12a4-4cff-a444-d47ee66c78d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1759.471676] env[62684]: INFO nova.compute.manager [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Terminating instance [ 1759.476476] env[62684]: DEBUG oslo_concurrency.lockutils [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 
tempest-ServerDiagnosticsV248Test-298832208-project-member] Acquiring lock "refresh_cache-e1540aa6-12a4-4cff-a444-d47ee66c78d7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1759.476656] env[62684]: DEBUG oslo_concurrency.lockutils [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Acquired lock "refresh_cache-e1540aa6-12a4-4cff-a444-d47ee66c78d7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1759.476836] env[62684]: DEBUG nova.network.neutron [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1759.484245] env[62684]: DEBUG oslo_vmware.api [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052419, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.523343] env[62684]: DEBUG nova.scheduler.client.report [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1759.526455] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Releasing lock "refresh_cache-5bc73032-45f9-4b5c-a4ea-e07c48e4f82b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1759.526747] env[62684]: DEBUG nova.compute.manager [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Instance network_info: |[{"id": "c0047526-de96-4c14-8230-e69c53c790af", "address": "fa:16:3e:80:f4:14", "network": {"id": "5c74d1ef-f900-4b04-bac5-bfa28ffd8537", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-2074219577-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c873948cf2a646008a7fffc544a6a8fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "ef746c57-cd18-4883-a0e9-c52937aaf41d", "external-id": "nsx-vlan-transportzone-863", "segmentation_id": 863, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0047526-de", "ovs_interfaceid": "c0047526-de96-4c14-8230-e69c53c790af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1759.527390] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:f4:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ef746c57-cd18-4883-a0e9-c52937aaf41d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c0047526-de96-4c14-8230-e69c53c790af', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1759.534795] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Creating folder: Project (c873948cf2a646008a7fffc544a6a8fd). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1759.539163] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8beb7605-9407-4a52-8c53-41cbbde3d481 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.547195] env[62684]: INFO nova.compute.manager [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Took 26.44 seconds to build instance. [ 1759.560538] env[62684]: DEBUG oslo_vmware.api [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052420, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.562017] env[62684]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1759.562017] env[62684]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62684) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1759.563269] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Folder already exists: Project (c873948cf2a646008a7fffc544a6a8fd). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1759.563529] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Creating folder: Instances. Parent ref: group-v421119. 
{{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1759.566257] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8bf4846e-978a-4d1c-a966-4ecdcc9ec9b9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.578645] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Created folder: Instances in parent group-v421119. [ 1759.579086] env[62684]: DEBUG oslo.service.loopingcall [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1759.579159] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1759.579348] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a374382a-be5d-45f0-8014-c352025f7b8b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.601300] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1759.601300] env[62684]: value = "task-2052423" [ 1759.601300] env[62684]: _type = "Task" [ 1759.601300] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.611841] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052423, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.760950] env[62684]: DEBUG nova.network.neutron [req-1023fed2-3b87-4f2f-9fc3-4af1c99d8ce0 req-fc8e1a02-c945-456a-bb00-0e2f92207d5e service nova] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Updated VIF entry in instance network info cache for port f8d1bd12-b449-41ef-bd95-755f619b639a. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1759.761388] env[62684]: DEBUG nova.network.neutron [req-1023fed2-3b87-4f2f-9fc3-4af1c99d8ce0 req-fc8e1a02-c945-456a-bb00-0e2f92207d5e service nova] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Updating instance_info_cache with network_info: [{"id": "f8d1bd12-b449-41ef-bd95-755f619b639a", "address": "fa:16:3e:56:d6:c1", "network": {"id": "27f7ef6a-6ec1-4b90-962b-11f871569e37", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-434465261-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "949b8296172b4f4aab8bd28c8f4a03d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c", "external-id": "nsx-vlan-transportzone-977", "segmentation_id": 977, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8d1bd12-b4", "ovs_interfaceid": "f8d1bd12-b449-41ef-bd95-755f619b639a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1759.850338] env[62684]: DEBUG nova.compute.manager [req-6d77457a-9dfb-48cf-b79f-bafd0bbeeb33 req-60823617-2750-4fa3-b856-be2743e0a1b2 service nova] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Received event network-vif-plugged-c0047526-de96-4c14-8230-e69c53c790af {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1759.850338] env[62684]: DEBUG oslo_concurrency.lockutils [req-6d77457a-9dfb-48cf-b79f-bafd0bbeeb33 req-60823617-2750-4fa3-b856-be2743e0a1b2 service nova] Acquiring lock "5bc73032-45f9-4b5c-a4ea-e07c48e4f82b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.850478] env[62684]: DEBUG oslo_concurrency.lockutils [req-6d77457a-9dfb-48cf-b79f-bafd0bbeeb33 req-60823617-2750-4fa3-b856-be2743e0a1b2 service nova] Lock "5bc73032-45f9-4b5c-a4ea-e07c48e4f82b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1759.850645] env[62684]: DEBUG oslo_concurrency.lockutils [req-6d77457a-9dfb-48cf-b79f-bafd0bbeeb33 req-60823617-2750-4fa3-b856-be2743e0a1b2 service nova] Lock "5bc73032-45f9-4b5c-a4ea-e07c48e4f82b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1759.850856] env[62684]: DEBUG nova.compute.manager [req-6d77457a-9dfb-48cf-b79f-bafd0bbeeb33 req-60823617-2750-4fa3-b856-be2743e0a1b2 service nova] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] No waiting events found dispatching 
network-vif-plugged-c0047526-de96-4c14-8230-e69c53c790af {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1759.851216] env[62684]: WARNING nova.compute.manager [req-6d77457a-9dfb-48cf-b79f-bafd0bbeeb33 req-60823617-2750-4fa3-b856-be2743e0a1b2 service nova] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Received unexpected event network-vif-plugged-c0047526-de96-4c14-8230-e69c53c790af for instance with vm_state building and task_state spawning. [ 1759.851442] env[62684]: DEBUG nova.compute.manager [req-6d77457a-9dfb-48cf-b79f-bafd0bbeeb33 req-60823617-2750-4fa3-b856-be2743e0a1b2 service nova] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Received event network-changed-c0047526-de96-4c14-8230-e69c53c790af {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1759.851608] env[62684]: DEBUG nova.compute.manager [req-6d77457a-9dfb-48cf-b79f-bafd0bbeeb33 req-60823617-2750-4fa3-b856-be2743e0a1b2 service nova] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Refreshing instance network info cache due to event network-changed-c0047526-de96-4c14-8230-e69c53c790af. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1759.851798] env[62684]: DEBUG oslo_concurrency.lockutils [req-6d77457a-9dfb-48cf-b79f-bafd0bbeeb33 req-60823617-2750-4fa3-b856-be2743e0a1b2 service nova] Acquiring lock "refresh_cache-5bc73032-45f9-4b5c-a4ea-e07c48e4f82b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1759.851938] env[62684]: DEBUG oslo_concurrency.lockutils [req-6d77457a-9dfb-48cf-b79f-bafd0bbeeb33 req-60823617-2750-4fa3-b856-be2743e0a1b2 service nova] Acquired lock "refresh_cache-5bc73032-45f9-4b5c-a4ea-e07c48e4f82b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1759.852129] env[62684]: DEBUG nova.network.neutron [req-6d77457a-9dfb-48cf-b79f-bafd0bbeeb33 req-60823617-2750-4fa3-b856-be2743e0a1b2 service nova] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Refreshing network info cache for port c0047526-de96-4c14-8230-e69c53c790af {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1759.984021] env[62684]: DEBUG oslo_vmware.api [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052419, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.999341] env[62684]: DEBUG nova.network.neutron [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Instance cache missing network info. 
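The req-6d77457a entries above show how externally generated Neutron events are handled: the handler takes the per-instance "<uuid>-events" lock, tries to pop a waiter registered by the spawning thread, and when none exists (the VM is still building) logs the "Received unexpected event" warning; a subsequent network-changed event then refreshes the instance's network info cache under the "refresh_cache-<uuid>" lock. A simplified, stdlib-only dispatcher illustrating the pop-or-warn half of that flow, not nova.compute.manager.InstanceEvents itself:

import threading
from collections import defaultdict


class InstanceEvents:
    """Sketch of the pop-or-warn dispatch visible in the log."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = defaultdict(dict)   # instance_uuid -> {event_name: Event}

    def prepare_for_event(self, instance_uuid, event_name):
        """Register a waiter before requesting the external action."""
        waiter = threading.Event()
        with self._lock:                    # mirrors the "<uuid>-events" lock
            self._waiters[instance_uuid][event_name] = waiter
        return waiter

    def dispatch(self, instance_uuid, event_name):
        """Wake the registered waiter, or report the event as unexpected."""
        with self._lock:
            waiter = self._waiters.get(instance_uuid, {}).pop(event_name, None)
        if waiter is None:
            print(f"WARNING: unexpected event {event_name} for {instance_uuid}")
            return False
        waiter.set()
        return True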
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1760.028813] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.418s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1760.029621] env[62684]: DEBUG nova.compute.manager [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1760.033414] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.916s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1760.034404] env[62684]: DEBUG nova.objects.instance [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] Lazy-loading 'resources' on Instance uuid 4cbcfa1a-c034-4de7-ad25-4ad22316067e {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1760.050165] env[62684]: DEBUG oslo_concurrency.lockutils [None req-56ed0acb-bba3-4abe-be59-94a2ba426fde tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Lock "73f27fc0-ebae-41c7-b292-14396f79a5a2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.878s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1760.057465] env[62684]: DEBUG oslo_vmware.api [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052420, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.528523} completed successfully. 
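The "Inventory has not changed" report a few entries earlier lists, per resource class, total, reserved and allocation_ratio (plus min_unit/max_unit/step_size). To a first approximation, the schedulable capacity derived from such an inventory is (total - reserved) * allocation_ratio, which for the figures in this log gives 192 VCPU, 196078 MB of RAM and 400 GB of disk. A small sketch of that arithmetic; max_unit, min_unit and step_size, which bound a single allocation, are ignored here:

def capacity(total, reserved, allocation_ratio, **_ignored):
    """Effective schedulable capacity for one resource class."""
    return (total - reserved) * allocation_ratio


inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    print(rc, capacity(**inv))
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0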
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.058034] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] dcb0a5b2-379e-44ff-a9b0-be615943c94e/dcb0a5b2-379e-44ff-a9b0-be615943c94e.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1760.058372] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1760.058675] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a573fc45-521a-48f7-8471-e374fd068c9b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.069640] env[62684]: DEBUG nova.network.neutron [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1760.073452] env[62684]: DEBUG oslo_vmware.api [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1760.073452] env[62684]: value = "task-2052424" [ 1760.073452] env[62684]: _type = "Task" [ 1760.073452] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.084511] env[62684]: DEBUG oslo_vmware.api [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052424, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.112854] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052423, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.264369] env[62684]: DEBUG oslo_concurrency.lockutils [req-1023fed2-3b87-4f2f-9fc3-4af1c99d8ce0 req-fc8e1a02-c945-456a-bb00-0e2f92207d5e service nova] Releasing lock "refresh_cache-d532b5fa-90a3-4f25-8684-4eabaa432c86" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1760.481674] env[62684]: DEBUG oslo_vmware.api [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052419, 'name': CreateSnapshot_Task, 'duration_secs': 1.136353} completed successfully. 
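Task-2052420 above copies the cached image VMDK into the instance directory, and task-2052424 then extends the root virtual disk to 1048576, i.e. the flavor's root_gb=1 expressed in KB. Two helpers sketching that arithmetic and the extend-only-if-smaller decision; illustrative only, not the driver's _extend_virtual_disk code path:

def root_disk_target_kb(flavor_root_gb):
    """Flavor root size in KB, matching the '1048576' logged for a 1 GB root disk."""
    return flavor_root_gb * 1024 * 1024


def needs_extend(image_size_bytes, flavor_root_gb):
    """Extend only when the copied image is smaller than the flavor's root disk."""
    return image_size_bytes < flavor_root_gb * 1024 ** 3


print(root_disk_target_kb(1))      # 1048576
print(needs_extend(21318656, 1))   # True: ~20 MiB sparse image file vs. a 1 GB root disk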
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.481950] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Created Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1760.482731] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eba8c143-6e74-44c7-a602-042da1c49101 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.539692] env[62684]: DEBUG nova.compute.utils [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1760.547394] env[62684]: DEBUG nova.compute.manager [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1760.547556] env[62684]: DEBUG nova.network.neutron [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1760.553382] env[62684]: DEBUG nova.compute.manager [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1760.561887] env[62684]: DEBUG nova.network.neutron [req-6d77457a-9dfb-48cf-b79f-bafd0bbeeb33 req-60823617-2750-4fa3-b856-be2743e0a1b2 service nova] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Updated VIF entry in instance network info cache for port c0047526-de96-4c14-8230-e69c53c790af. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1760.562262] env[62684]: DEBUG nova.network.neutron [req-6d77457a-9dfb-48cf-b79f-bafd0bbeeb33 req-60823617-2750-4fa3-b856-be2743e0a1b2 service nova] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Updating instance_info_cache with network_info: [{"id": "c0047526-de96-4c14-8230-e69c53c790af", "address": "fa:16:3e:80:f4:14", "network": {"id": "5c74d1ef-f900-4b04-bac5-bfa28ffd8537", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-2074219577-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c873948cf2a646008a7fffc544a6a8fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ef746c57-cd18-4883-a0e9-c52937aaf41d", "external-id": "nsx-vlan-transportzone-863", "segmentation_id": 863, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0047526-de", "ovs_interfaceid": "c0047526-de96-4c14-8230-e69c53c790af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1760.572881] env[62684]: DEBUG oslo_concurrency.lockutils [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Releasing lock "refresh_cache-e1540aa6-12a4-4cff-a444-d47ee66c78d7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1760.573331] env[62684]: DEBUG nova.compute.manager [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1760.574045] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1760.578061] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec095e69-4474-4482-8f4b-83543b89f836 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.591019] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1760.594218] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1fde06f1-f441-497f-ba68-3cc2d517ef61 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.596112] env[62684]: DEBUG oslo_vmware.api [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052424, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.151743} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.598132] env[62684]: DEBUG nova.policy [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4f9ace4d78b94a3db9eb74236fca1e6a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aef5d7061c834332b9f9c5c75596bf08', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1760.602526] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1760.602636] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9141eb81-bf12-4d64-a4e5-553fc8a3a4b3 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Acquiring lock "interface-43d28811-26e4-4016-9f82-98349d4a05b7-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1760.602817] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9141eb81-bf12-4d64-a4e5-553fc8a3a4b3 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Lock 
"interface-43d28811-26e4-4016-9f82-98349d4a05b7-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1760.603157] env[62684]: DEBUG nova.objects.instance [None req-9141eb81-bf12-4d64-a4e5-553fc8a3a4b3 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Lazy-loading 'flavor' on Instance uuid 43d28811-26e4-4016-9f82-98349d4a05b7 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1760.608195] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b77b09b-8493-4c5a-8e36-a141128916af {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.613396] env[62684]: DEBUG oslo_vmware.api [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Waiting for the task: (returnval){ [ 1760.613396] env[62684]: value = "task-2052425" [ 1760.613396] env[62684]: _type = "Task" [ 1760.613396] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.641054] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] dcb0a5b2-379e-44ff-a9b0-be615943c94e/dcb0a5b2-379e-44ff-a9b0-be615943c94e.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1760.641344] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052423, 'name': CreateVM_Task, 'duration_secs': 0.527649} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.642921] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-001a2f88-eeb4-4b75-a353-0a818de6f91a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.657867] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1760.664306] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'disk_bus': None, 'boot_index': 0, 'device_type': None, 'attachment_id': 'f2877eb4-dd2d-4df8-91a2-806984307811', 'guest_format': None, 'mount_device': '/dev/sda', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421128', 'volume_id': '34523d13-ed90-416e-a19a-57c837136d21', 'name': 'volume-34523d13-ed90-416e-a19a-57c837136d21', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5bc73032-45f9-4b5c-a4ea-e07c48e4f82b', 'attached_at': '', 'detached_at': '', 'volume_id': '34523d13-ed90-416e-a19a-57c837136d21', 'serial': '34523d13-ed90-416e-a19a-57c837136d21'}, 'volume_type': None}], 'swap': None} {{(pid=62684) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1760.664539] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Root volume attach. Driver type: vmdk {{(pid=62684) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1760.664888] env[62684]: DEBUG oslo_vmware.api [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Task: {'id': task-2052425, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.666351] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e722361-0013-46b2-b381-c599b80244b2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.671991] env[62684]: DEBUG oslo_vmware.api [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1760.671991] env[62684]: value = "task-2052426" [ 1760.671991] env[62684]: _type = "Task" [ 1760.671991] env[62684]: } to complete. 
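The block device information above describes a boot-from-volume instance: no image, and a single mapping with boot_index 0 whose connection_info carries the vmdk volume, mounted at the root_device_name /dev/sda; the driver then performs a root volume attach instead of copying an image. A simplified helper that picks that root mapping out of such a structure (not the real nova block_device helpers):

def get_root_volume_mapping(block_device_info):
    """Return the mapping backing the root device, or None if there is none.

    Chooses the entry whose mount_device equals root_device_name, or whose
    boot_index is 0, as in the structure logged above.
    """
    root_dev = block_device_info.get("root_device_name")
    for bdm in block_device_info.get("block_device_mapping", []):
        if bdm.get("mount_device") == root_dev or bdm.get("boot_index") == 0:
            return bdm
    return None


info = {
    "root_device_name": "/dev/sda",
    "block_device_mapping": [
        {"boot_index": 0, "mount_device": "/dev/sda",
         "connection_info": {"driver_volume_type": "vmdk",
                             "data": {"volume_id": "34523d13-ed90-416e-a19a-57c837136d21"}}},
    ],
}
print(get_root_volume_mapping(info)["connection_info"]["driver_volume_type"])  # vmdk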
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.678616] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52b07594-004d-4a8a-9a5a-22aaec98f2dd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.691467] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f61531-da7b-4c7d-b90e-15d439db24ca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.694275] env[62684]: DEBUG oslo_vmware.api [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052426, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.702503] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-7e02271c-1979-4d3d-b4f3-868abad502be {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.713016] env[62684]: DEBUG oslo_vmware.api [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Waiting for the task: (returnval){ [ 1760.713016] env[62684]: value = "task-2052427" [ 1760.713016] env[62684]: _type = "Task" [ 1760.713016] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.743315] env[62684]: DEBUG oslo_vmware.api [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052427, 'name': RelocateVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.872146] env[62684]: DEBUG oslo_concurrency.lockutils [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Acquiring lock "3a172e9f-9f79-489e-9571-80bd74ad8609" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1760.872438] env[62684]: DEBUG oslo_concurrency.lockutils [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Lock "3a172e9f-9f79-489e-9571-80bd74ad8609" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1761.002019] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Creating linked-clone VM from snapshot {{(pid=62684) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1761.002382] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-02ae1e21-34ff-44b8-ae55-39402f6fad0e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.013530] env[62684]: DEBUG oslo_vmware.api [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Waiting for the task: (returnval){ [ 1761.013530] env[62684]: value = "task-2052428" [ 1761.013530] env[62684]: _type = "Task" [ 1761.013530] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.023265] env[62684]: DEBUG oslo_vmware.api [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052428, 'name': CloneVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.051404] env[62684]: DEBUG nova.compute.manager [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1761.064648] env[62684]: DEBUG oslo_concurrency.lockutils [req-6d77457a-9dfb-48cf-b79f-bafd0bbeeb33 req-60823617-2750-4fa3-b856-be2743e0a1b2 service nova] Releasing lock "refresh_cache-5bc73032-45f9-4b5c-a4ea-e07c48e4f82b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1761.078380] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.089107] env[62684]: DEBUG nova.network.neutron [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Successfully updated port: d94a0e39-627d-4191-9011-76da63ed1d8f {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1761.101507] env[62684]: DEBUG nova.network.neutron [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Successfully created port: b5cb1869-ace8-44cb-bd59-60e4ce4e95ad {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1761.115678] env[62684]: DEBUG nova.objects.instance [None req-9141eb81-bf12-4d64-a4e5-553fc8a3a4b3 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Lazy-loading 'pci_requests' on Instance uuid 43d28811-26e4-4016-9f82-98349d4a05b7 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1761.130883] env[62684]: DEBUG oslo_vmware.api [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Task: {'id': task-2052425, 'name': PowerOffVM_Task, 'duration_secs': 0.208575} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.130883] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1761.130883] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1761.132031] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1f3b12ce-8d20-4ce3-bbe5-bfc936c6e0c1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.136043] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d67a910a-9074-40e2-9f8e-d94b944da8a3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.145766] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8109e65-bd97-4b9a-be3d-e27f282cf633 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.188443] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e516a4-b90d-4f9c-bb51-6b6a4ca82d23 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.192564] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1761.192844] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1761.193017] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Deleting the datastore file [datastore1] e1540aa6-12a4-4cff-a444-d47ee66c78d7 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1761.194046] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a4453b00-bde6-4040-8dc0-a997c927bc40 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.203134] env[62684]: DEBUG oslo_vmware.api [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052426, 'name': ReconfigVM_Task} 
progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.206030] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-488bffc7-2be0-4fb2-93f8-cf717afb1298 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.210631] env[62684]: DEBUG oslo_vmware.api [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Waiting for the task: (returnval){ [ 1761.210631] env[62684]: value = "task-2052430" [ 1761.210631] env[62684]: _type = "Task" [ 1761.210631] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.227153] env[62684]: DEBUG nova.compute.provider_tree [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1761.239387] env[62684]: DEBUG oslo_vmware.api [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Task: {'id': task-2052430, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.242818] env[62684]: DEBUG oslo_vmware.api [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052427, 'name': RelocateVM_Task} progress is 34%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.310572] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "6d4061e4-a074-445d-95c5-239014ee87f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.310914] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "6d4061e4-a074-445d-95c5-239014ee87f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1761.354962] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "52839b18-a68a-4ec7-a921-c42454955e82" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.355578] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "52839b18-a68a-4ec7-a921-c42454955e82" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1761.528757] env[62684]: DEBUG oslo_vmware.api [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052428, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.593931] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Acquiring lock "refresh_cache-aec16a15-5d75-4ea6-800b-1bf67f762d89" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1761.593931] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Acquired lock "refresh_cache-aec16a15-5d75-4ea6-800b-1bf67f762d89" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1761.593931] env[62684]: DEBUG nova.network.neutron [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1761.621027] env[62684]: DEBUG nova.objects.base [None req-9141eb81-bf12-4d64-a4e5-553fc8a3a4b3 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Object Instance<43d28811-26e4-4016-9f82-98349d4a05b7> lazy-loaded attributes: flavor,pci_requests {{(pid=62684) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1761.621027] env[62684]: DEBUG nova.network.neutron [None req-9141eb81-bf12-4d64-a4e5-553fc8a3a4b3 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1761.699504] env[62684]: DEBUG oslo_vmware.api [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052426, 'name': ReconfigVM_Task, 'duration_secs': 0.687461} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.700027] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Reconfigured VM instance instance-0000000f to attach disk [datastore1] dcb0a5b2-379e-44ff-a9b0-be615943c94e/dcb0a5b2-379e-44ff-a9b0-be615943c94e.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1761.701062] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1dbb8a7d-fda6-4a1f-b151-e705dfc444b0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.719025] env[62684]: DEBUG oslo_vmware.api [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1761.719025] env[62684]: value = "task-2052431" [ 1761.719025] env[62684]: _type = "Task" [ 1761.719025] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.734475] env[62684]: DEBUG nova.scheduler.client.report [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1761.745548] env[62684]: DEBUG oslo_vmware.api [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052427, 'name': RelocateVM_Task} progress is 45%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.755206] env[62684]: DEBUG oslo_vmware.api [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052431, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.755206] env[62684]: DEBUG oslo_vmware.api [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Task: {'id': task-2052430, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164343} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.757583] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1761.757987] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1761.758346] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1761.758645] env[62684]: INFO nova.compute.manager [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Took 1.19 seconds to destroy the instance on the hypervisor. 
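The destroy records above follow one recurring pattern: submit a vCenter task (here FileManager.DeleteDatastoreFile_Task), then poll it until it reports a terminal state, logging "progress is N%" in between. The sketch below is an illustration of that polling loop only; the session object and its get_task_info() helper are assumed stand-ins, not the real oslo.vmware or nova interfaces.

import time

class TaskTimeout(Exception):
    """Raised when a task does not reach a terminal state in time."""

def wait_for_task(session, task_ref, poll_interval=0.5, timeout=300.0):
    """Poll a vCenter-style task until it succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = session.get_task_info(task_ref)   # assumed helper on the session
        if info.state == "success":
            return info.result                   # e.g. the duration_secs seen in the log
        if info.state == "error":
            raise RuntimeError(f"task {task_ref} failed: {info.error}")
        # queued/running: report progress, as in the "progress is N%" lines above
        print(f"Task {task_ref} progress is {info.progress}%")
        time.sleep(poll_interval)
    raise TaskTimeout(task_ref)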
[ 1761.759058] env[62684]: DEBUG oslo.service.loopingcall [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1761.762829] env[62684]: DEBUG nova.compute.manager [-] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1761.762829] env[62684]: DEBUG nova.network.neutron [-] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1761.787926] env[62684]: DEBUG nova.network.neutron [-] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1761.839676] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9141eb81-bf12-4d64-a4e5-553fc8a3a4b3 tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Lock "interface-43d28811-26e4-4016-9f82-98349d4a05b7-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.236s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.030530] env[62684]: DEBUG oslo_vmware.api [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052428, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.063193] env[62684]: DEBUG nova.compute.manager [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1762.099349] env[62684]: DEBUG nova.virt.hardware [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1762.099615] env[62684]: DEBUG nova.virt.hardware [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1762.100237] env[62684]: DEBUG nova.virt.hardware [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1762.100237] env[62684]: DEBUG nova.virt.hardware [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1762.100237] env[62684]: DEBUG nova.virt.hardware [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1762.100390] env[62684]: DEBUG nova.virt.hardware [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1762.100727] env[62684]: DEBUG nova.virt.hardware [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1762.100727] env[62684]: DEBUG nova.virt.hardware [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1762.101078] env[62684]: DEBUG nova.virt.hardware [None 
req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1762.101299] env[62684]: DEBUG nova.virt.hardware [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1762.101483] env[62684]: DEBUG nova.virt.hardware [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1762.102375] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f28aa0e-da60-423f-83aa-c054516d8e7a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.114771] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eefd389f-594d-4527-83ce-734a99dfd35e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.138373] env[62684]: DEBUG nova.network.neutron [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1762.231228] env[62684]: DEBUG oslo_vmware.api [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052431, 'name': Rename_Task, 'duration_secs': 0.220695} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.234802] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1762.235187] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5937b8f9-f822-4d6b-b5f1-a99d06c97f0c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.239784] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.206s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.247538] env[62684]: DEBUG oslo_concurrency.lockutils [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 21.140s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.248787] env[62684]: DEBUG oslo_vmware.api [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052427, 'name': RelocateVM_Task} progress is 58%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.250092] env[62684]: DEBUG oslo_vmware.api [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1762.250092] env[62684]: value = "task-2052432" [ 1762.250092] env[62684]: _type = "Task" [ 1762.250092] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.269440] env[62684]: DEBUG oslo_vmware.api [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052432, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.272780] env[62684]: INFO nova.scheduler.client.report [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] Deleted allocations for instance 4cbcfa1a-c034-4de7-ad25-4ad22316067e [ 1762.293343] env[62684]: DEBUG nova.network.neutron [-] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1762.392338] env[62684]: DEBUG nova.network.neutron [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Updating instance_info_cache with network_info: [{"id": "d94a0e39-627d-4191-9011-76da63ed1d8f", "address": "fa:16:3e:2b:4b:d0", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.143", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd94a0e39-62", "ovs_interfaceid": "d94a0e39-627d-4191-9011-76da63ed1d8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1762.531839] env[62684]: DEBUG oslo_vmware.api [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052428, 'name': CloneVM_Task, 'duration_secs': 1.488951} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.532146] env[62684]: INFO nova.virt.vmwareapi.vmops [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Created linked-clone VM from snapshot [ 1762.533157] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7fecf26-d933-4759-b44d-c1f904f1d3fe {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.547470] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Uploading image 500c73cc-1525-4d2d-8617-14b9836332de {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1762.577531] env[62684]: DEBUG oslo_vmware.rw_handles [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1762.577531] env[62684]: value = "vm-421175" [ 1762.577531] env[62684]: _type = "VirtualMachine" [ 1762.577531] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1762.577949] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-54512f53-8927-4cda-87ce-cf4991204e7d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.590854] env[62684]: DEBUG oslo_vmware.rw_handles [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Lease: (returnval){ [ 1762.590854] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5291ec0e-359f-f242-acb3-63ba759376f6" [ 1762.590854] env[62684]: _type = "HttpNfcLease" [ 1762.590854] env[62684]: } obtained for exporting VM: (result){ [ 1762.590854] env[62684]: value = "vm-421175" [ 1762.590854] env[62684]: _type = "VirtualMachine" [ 1762.590854] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1762.591360] env[62684]: DEBUG oslo_vmware.api [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Waiting for the lease: (returnval){ [ 1762.591360] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5291ec0e-359f-f242-acb3-63ba759376f6" [ 1762.591360] env[62684]: _type = "HttpNfcLease" [ 1762.591360] env[62684]: } to be ready. {{(pid=62684) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1762.603224] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1762.603224] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5291ec0e-359f-f242-acb3-63ba759376f6" [ 1762.603224] env[62684]: _type = "HttpNfcLease" [ 1762.603224] env[62684]: } is initializing. 
{{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1762.692756] env[62684]: DEBUG oslo_concurrency.lockutils [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "8d53d8c3-6db8-4ebe-a35f-0f64602fafcb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1762.693109] env[62684]: DEBUG oslo_concurrency.lockutils [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "8d53d8c3-6db8-4ebe-a35f-0f64602fafcb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.693448] env[62684]: DEBUG oslo_concurrency.lockutils [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "8d53d8c3-6db8-4ebe-a35f-0f64602fafcb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1762.693799] env[62684]: DEBUG oslo_concurrency.lockutils [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "8d53d8c3-6db8-4ebe-a35f-0f64602fafcb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.693996] env[62684]: DEBUG oslo_concurrency.lockutils [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "8d53d8c3-6db8-4ebe-a35f-0f64602fafcb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.696641] env[62684]: INFO nova.compute.manager [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Terminating instance [ 1762.698779] env[62684]: DEBUG nova.compute.manager [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1762.699044] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1762.699953] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2290ed06-62be-477c-a7f6-21d43ed4647d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.716437] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1762.717033] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6c61e83b-9e05-417a-8dbf-84116cee5819 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.745498] env[62684]: DEBUG oslo_vmware.api [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052427, 'name': RelocateVM_Task} progress is 71%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.753949] env[62684]: INFO nova.compute.claims [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1762.770785] env[62684]: DEBUG oslo_vmware.api [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052432, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.784038] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f44e6c81-d244-4f2b-ad94-e5a0c430fda7 tempest-DeleteServersAdminTestJSON-784470396 tempest-DeleteServersAdminTestJSON-784470396-project-admin] Lock "4cbcfa1a-c034-4de7-ad25-4ad22316067e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.581s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.799040] env[62684]: INFO nova.compute.manager [-] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Took 1.04 seconds to deallocate network for instance. 
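The lockutils lines above (per-instance and per-event locks, each reporting "waited N s" on acquire and "held N s" on release) reflect named, serialized critical sections keyed by instance UUID. The following is a simplified stand-in for that pattern, not the real oslo_concurrency code; the lock registry and timing output are illustrative only.

import threading
import time
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)   # one lock per name, created on demand

@contextmanager
def instance_lock(name):
    lock = _locks[name]
    t0 = time.monotonic()
    with lock:
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - t1
            print(f'Lock "{name}" released :: held {held:.3f}s')

# Usage mirroring the terminate_instance lock lines above:
with instance_lock("8d53d8c3-6db8-4ebe-a35f-0f64602fafcb"):
    pass  # do_terminate_instance work would run here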
[ 1762.847023] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1762.848257] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1762.848257] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Deleting the datastore file [datastore2] 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1762.849000] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a6bca698-4955-440a-8740-465f1f8d8157 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.863052] env[62684]: DEBUG oslo_vmware.api [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1762.863052] env[62684]: value = "task-2052435" [ 1762.863052] env[62684]: _type = "Task" [ 1762.863052] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.879685] env[62684]: DEBUG oslo_vmware.api [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052435, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.896568] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Releasing lock "refresh_cache-aec16a15-5d75-4ea6-800b-1bf67f762d89" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1762.896568] env[62684]: DEBUG nova.compute.manager [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Instance network_info: |[{"id": "d94a0e39-627d-4191-9011-76da63ed1d8f", "address": "fa:16:3e:2b:4b:d0", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.143", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd94a0e39-62", "ovs_interfaceid": "d94a0e39-627d-4191-9011-76da63ed1d8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1762.897585] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:4b:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ded18042-834c-4792-b3e8-b1c377446432', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd94a0e39-627d-4191-9011-76da63ed1d8f', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1762.908560] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Creating folder: Project (84eadd714b9646489a5093086fb06aef). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1762.909118] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3204f1db-b398-4e6a-8b99-a3b4c0cd3cd7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.928839] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Created folder: Project (84eadd714b9646489a5093086fb06aef) in parent group-v421118. 
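The two records above show the network_info cache entry being distilled into the "Instance VIF info" list that the VM build uses (bridge name, MAC, NSX logical-switch reference, port id, vif model). A rough sketch of that mapping, with field names taken from the log, is below; it is not nova's actual helper, and the fixed 'vmxnet3' model is an assumption for this example.

def build_vif_info(network_info):
    """Map a network_info list (as logged above) to vmwareapi-style VIF info."""
    vifs = []
    for vif in network_info:
        details = vif.get("details", {})
        vifs.append({
            "network_name": vif["network"]["bridge"],            # e.g. "br-int"
            "mac_address": vif["address"],
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": details.get("nsx-logical-switch-id"),
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": vif["id"],
            "vif_model": "vmxnet3",   # assumed constant for this sketch
        })
    return vifs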
[ 1762.929139] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Creating folder: Instances. Parent ref: group-v421176. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1762.929424] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ce60ed8c-3d4d-43ac-b538-308cefcb5190 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.946075] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Created folder: Instances in parent group-v421176. [ 1762.947055] env[62684]: DEBUG oslo.service.loopingcall [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1762.947055] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1762.947055] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ea34325e-9a7b-443a-886a-b457c389920f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.971556] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1762.971556] env[62684]: value = "task-2052438" [ 1762.971556] env[62684]: _type = "Task" [ 1762.971556] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.983689] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052438, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.099360] env[62684]: DEBUG nova.network.neutron [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Successfully updated port: b5cb1869-ace8-44cb-bd59-60e4ce4e95ad {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1763.106082] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1763.106082] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5291ec0e-359f-f242-acb3-63ba759376f6" [ 1763.106082] env[62684]: _type = "HttpNfcLease" [ 1763.106082] env[62684]: } is ready. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1763.106082] env[62684]: DEBUG oslo_vmware.rw_handles [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1763.106082] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5291ec0e-359f-f242-acb3-63ba759376f6" [ 1763.106082] env[62684]: _type = "HttpNfcLease" [ 1763.106082] env[62684]: }. 
{{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1763.106907] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ae526af-9424-41bb-b109-d9186d161ca8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.120563] env[62684]: DEBUG oslo_vmware.rw_handles [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5257eb7a-4d4b-29bd-186e-5ccf1f8bbf31/disk-0.vmdk from lease info. {{(pid=62684) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1763.121135] env[62684]: DEBUG oslo_vmware.rw_handles [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5257eb7a-4d4b-29bd-186e-5ccf1f8bbf31/disk-0.vmdk for reading. {{(pid=62684) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1763.187302] env[62684]: DEBUG nova.compute.manager [req-bd8ed448-280c-4eb2-b7eb-faa35202e5b1 req-4c7a7b44-b6db-4044-aa10-fc9824ecfb88 service nova] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Received event network-vif-plugged-d94a0e39-627d-4191-9011-76da63ed1d8f {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1763.187581] env[62684]: DEBUG oslo_concurrency.lockutils [req-bd8ed448-280c-4eb2-b7eb-faa35202e5b1 req-4c7a7b44-b6db-4044-aa10-fc9824ecfb88 service nova] Acquiring lock "aec16a15-5d75-4ea6-800b-1bf67f762d89-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1763.187808] env[62684]: DEBUG oslo_concurrency.lockutils [req-bd8ed448-280c-4eb2-b7eb-faa35202e5b1 req-4c7a7b44-b6db-4044-aa10-fc9824ecfb88 service nova] Lock "aec16a15-5d75-4ea6-800b-1bf67f762d89-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1763.188027] env[62684]: DEBUG oslo_concurrency.lockutils [req-bd8ed448-280c-4eb2-b7eb-faa35202e5b1 req-4c7a7b44-b6db-4044-aa10-fc9824ecfb88 service nova] Lock "aec16a15-5d75-4ea6-800b-1bf67f762d89-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.188173] env[62684]: DEBUG nova.compute.manager [req-bd8ed448-280c-4eb2-b7eb-faa35202e5b1 req-4c7a7b44-b6db-4044-aa10-fc9824ecfb88 service nova] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] No waiting events found dispatching network-vif-plugged-d94a0e39-627d-4191-9011-76da63ed1d8f {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1763.188454] env[62684]: WARNING nova.compute.manager [req-bd8ed448-280c-4eb2-b7eb-faa35202e5b1 req-4c7a7b44-b6db-4044-aa10-fc9824ecfb88 service nova] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Received unexpected event network-vif-plugged-d94a0e39-627d-4191-9011-76da63ed1d8f for instance with vm_state building 
and task_state spawning. [ 1763.233659] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9db6941f-dce1-48d1-b8b4-147438564669 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.247781] env[62684]: DEBUG oslo_vmware.api [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052427, 'name': RelocateVM_Task} progress is 84%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.266667] env[62684]: INFO nova.compute.resource_tracker [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Updating resource usage from migration 96f2bb25-db44-4b5b-b5d0-be024988a0cc [ 1763.289827] env[62684]: DEBUG oslo_vmware.api [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052432, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.305676] env[62684]: DEBUG oslo_concurrency.lockutils [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1763.384448] env[62684]: DEBUG oslo_vmware.api [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052435, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.427760] env[62684]: DEBUG nova.compute.manager [req-f3d49a98-8848-4901-8de4-a7b28b826a86 req-3f82d50f-fc59-495e-8b78-a2af4353bb4a service nova] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Received event network-changed-30471946-98e4-4413-acb5-8a8190d1dd82 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1763.428360] env[62684]: DEBUG nova.compute.manager [req-f3d49a98-8848-4901-8de4-a7b28b826a86 req-3f82d50f-fc59-495e-8b78-a2af4353bb4a service nova] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Refreshing instance network info cache due to event network-changed-30471946-98e4-4413-acb5-8a8190d1dd82. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1763.428360] env[62684]: DEBUG oslo_concurrency.lockutils [req-f3d49a98-8848-4901-8de4-a7b28b826a86 req-3f82d50f-fc59-495e-8b78-a2af4353bb4a service nova] Acquiring lock "refresh_cache-73f27fc0-ebae-41c7-b292-14396f79a5a2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1763.428563] env[62684]: DEBUG oslo_concurrency.lockutils [req-f3d49a98-8848-4901-8de4-a7b28b826a86 req-3f82d50f-fc59-495e-8b78-a2af4353bb4a service nova] Acquired lock "refresh_cache-73f27fc0-ebae-41c7-b292-14396f79a5a2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1763.428617] env[62684]: DEBUG nova.network.neutron [req-f3d49a98-8848-4901-8de4-a7b28b826a86 req-3f82d50f-fc59-495e-8b78-a2af4353bb4a service nova] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Refreshing network info cache for port 30471946-98e4-4413-acb5-8a8190d1dd82 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1763.496110] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052438, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.604911] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "refresh_cache-06751c34-0724-44ba-a263-ad27fcf2920f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1763.608850] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquired lock "refresh_cache-06751c34-0724-44ba-a263-ad27fcf2920f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1763.608850] env[62684]: DEBUG nova.network.neutron [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1763.748956] env[62684]: DEBUG oslo_vmware.api [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052427, 'name': RelocateVM_Task} progress is 97%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.774883] env[62684]: DEBUG oslo_vmware.api [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052432, 'name': PowerOnVM_Task, 'duration_secs': 1.301384} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.777790] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1763.777790] env[62684]: INFO nova.compute.manager [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Took 10.59 seconds to spawn the instance on the hypervisor. [ 1763.777790] env[62684]: DEBUG nova.compute.manager [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1763.777790] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8ca5ad4-e876-4aea-998e-0490b5f15900 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.805313] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e79c227-5a7a-4959-b4e5-d0f293058f8f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.818997] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39dd6cfe-c210-4645-b98c-8a3dff86a19f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.862230] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d12363-6356-4c1c-8992-e92ad31e3235 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.880880] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64d1e2d6-33d8-4fdf-a067-472668afbe2f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.885455] env[62684]: DEBUG oslo_vmware.api [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052435, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.66248} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.886179] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1763.887030] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1763.887172] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1763.887475] env[62684]: INFO nova.compute.manager [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1763.887778] env[62684]: DEBUG oslo.service.loopingcall [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1763.888665] env[62684]: DEBUG nova.compute.manager [-] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1763.888856] env[62684]: DEBUG nova.network.neutron [-] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1763.900481] env[62684]: DEBUG nova.compute.provider_tree [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1763.983915] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052438, 'name': CreateVM_Task, 'duration_secs': 0.541752} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.987044] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1763.987875] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1763.988071] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1763.988483] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1763.989169] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b3b2656-7981-42d3-9ea6-f2f6aab5cd22 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.995707] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Waiting for the task: (returnval){ [ 1763.995707] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d6bfb7-ee42-827c-ebdd-2d4a4cdea2f1" [ 1763.995707] env[62684]: _type = "Task" [ 1763.995707] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.005869] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d6bfb7-ee42-827c-ebdd-2d4a4cdea2f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.171130] env[62684]: DEBUG nova.network.neutron [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1764.251225] env[62684]: DEBUG oslo_vmware.api [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052427, 'name': RelocateVM_Task} progress is 98%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.312057] env[62684]: INFO nova.compute.manager [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Took 29.83 seconds to build instance. [ 1764.404654] env[62684]: DEBUG nova.scheduler.client.report [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1764.421155] env[62684]: DEBUG nova.network.neutron [req-f3d49a98-8848-4901-8de4-a7b28b826a86 req-3f82d50f-fc59-495e-8b78-a2af4353bb4a service nova] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Updated VIF entry in instance network info cache for port 30471946-98e4-4413-acb5-8a8190d1dd82. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1764.421155] env[62684]: DEBUG nova.network.neutron [req-f3d49a98-8848-4901-8de4-a7b28b826a86 req-3f82d50f-fc59-495e-8b78-a2af4353bb4a service nova] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Updating instance_info_cache with network_info: [{"id": "30471946-98e4-4413-acb5-8a8190d1dd82", "address": "fa:16:3e:85:31:4c", "network": {"id": "7982dcb9-e661-4690-9931-bf412f4a564e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1278071472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72ac36eda47d4c51a4b421c764d0404d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9bb629cd-6d0f-4bed-965c-bd04a2f3ec49", "external-id": "nsx-vlan-transportzone-848", "segmentation_id": 848, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30471946-98", "ovs_interfaceid": "30471946-98e4-4413-acb5-8a8190d1dd82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1764.490324] env[62684]: DEBUG nova.network.neutron [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Updating instance_info_cache with network_info: [{"id": "b5cb1869-ace8-44cb-bd59-60e4ce4e95ad", "address": "fa:16:3e:77:50:93", "network": {"id": "bca0ee43-bbb1-483b-9d82-56955369f9b7", 
"bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1592250106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aef5d7061c834332b9f9c5c75596bf08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bfa7abe-7e46-4d8f-b50a-4d0c4509e4dc", "external-id": "nsx-vlan-transportzone-951", "segmentation_id": 951, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5cb1869-ac", "ovs_interfaceid": "b5cb1869-ace8-44cb-bd59-60e4ce4e95ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1764.512819] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d6bfb7-ee42-827c-ebdd-2d4a4cdea2f1, 'name': SearchDatastore_Task, 'duration_secs': 0.030472} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1764.513603] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1764.514587] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1764.514587] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1764.514587] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1764.514736] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1764.515798] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee09e966-66a2-401c-88a3-e25773a786ae {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.528707] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1764.529346] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1764.530943] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f9a655e-0e71-491f-900f-7feb08601c43 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.537559] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Waiting for the task: (returnval){ [ 1764.537559] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523494cd-49e4-f74f-b748-7ec3113af044" [ 1764.537559] env[62684]: _type = "Task" [ 1764.537559] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.548293] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523494cd-49e4-f74f-b748-7ec3113af044, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.752507] env[62684]: DEBUG oslo_vmware.api [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052427, 'name': RelocateVM_Task, 'duration_secs': 3.874655} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1764.752948] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Volume attach. 
Driver type: vmdk {{(pid=62684) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1764.753336] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421128', 'volume_id': '34523d13-ed90-416e-a19a-57c837136d21', 'name': 'volume-34523d13-ed90-416e-a19a-57c837136d21', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5bc73032-45f9-4b5c-a4ea-e07c48e4f82b', 'attached_at': '', 'detached_at': '', 'volume_id': '34523d13-ed90-416e-a19a-57c837136d21', 'serial': '34523d13-ed90-416e-a19a-57c837136d21'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1764.754242] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac4d1624-cd97-47cc-a209-a9a8b9edca94 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.773330] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b77ddcd4-97a3-42cf-ae5a-4b44da678c95 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.797841] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] volume-34523d13-ed90-416e-a19a-57c837136d21/volume-34523d13-ed90-416e-a19a-57c837136d21.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1764.798328] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-38bd6973-2e05-4645-93cb-634073c6e911 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.817935] env[62684]: DEBUG oslo_concurrency.lockutils [None req-05b2f797-0897-4dab-bc51-ffe72abb4b9e tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "dcb0a5b2-379e-44ff-a9b0-be615943c94e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.551s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.830456] env[62684]: DEBUG oslo_vmware.api [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Waiting for the task: (returnval){ [ 1764.830456] env[62684]: value = "task-2052439" [ 1764.830456] env[62684]: _type = "Task" [ 1764.830456] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.843227] env[62684]: DEBUG oslo_vmware.api [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052439, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.911213] env[62684]: DEBUG oslo_concurrency.lockutils [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.664s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.911213] env[62684]: INFO nova.compute.manager [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Migrating [ 1764.911361] env[62684]: DEBUG oslo_concurrency.lockutils [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1764.911710] env[62684]: DEBUG oslo_concurrency.lockutils [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquired lock "compute-rpcapi-router" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1764.913702] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.207s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1764.917201] env[62684]: INFO nova.compute.claims [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1764.924610] env[62684]: INFO nova.compute.rpcapi [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Automatically selected compute RPC version 6.3 from minimum service version 67 [ 1764.926410] env[62684]: DEBUG oslo_concurrency.lockutils [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Releasing lock "compute-rpcapi-router" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1764.933383] env[62684]: DEBUG nova.network.neutron [-] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1764.937407] env[62684]: DEBUG oslo_concurrency.lockutils 
[req-f3d49a98-8848-4901-8de4-a7b28b826a86 req-3f82d50f-fc59-495e-8b78-a2af4353bb4a service nova] Releasing lock "refresh_cache-73f27fc0-ebae-41c7-b292-14396f79a5a2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1764.959374] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Acquiring lock "43d28811-26e4-4016-9f82-98349d4a05b7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1764.959640] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Lock "43d28811-26e4-4016-9f82-98349d4a05b7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1764.959895] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Acquiring lock "43d28811-26e4-4016-9f82-98349d4a05b7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1764.960094] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Lock "43d28811-26e4-4016-9f82-98349d4a05b7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1764.963018] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Lock "43d28811-26e4-4016-9f82-98349d4a05b7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.966325] env[62684]: INFO nova.compute.manager [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Terminating instance [ 1764.969642] env[62684]: DEBUG nova.compute.manager [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1764.969851] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1764.971058] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2df6920-4f91-4aa0-96d9-e2dd298fa322 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.983463] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1764.983766] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a8686b75-cebb-4ed5-a3b0-14c74db0f944 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.992500] env[62684]: DEBUG oslo_vmware.api [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Waiting for the task: (returnval){ [ 1764.992500] env[62684]: value = "task-2052440" [ 1764.992500] env[62684]: _type = "Task" [ 1764.992500] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.997783] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Releasing lock "refresh_cache-06751c34-0724-44ba-a263-ad27fcf2920f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1764.997961] env[62684]: DEBUG nova.compute.manager [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Instance network_info: |[{"id": "b5cb1869-ace8-44cb-bd59-60e4ce4e95ad", "address": "fa:16:3e:77:50:93", "network": {"id": "bca0ee43-bbb1-483b-9d82-56955369f9b7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1592250106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aef5d7061c834332b9f9c5c75596bf08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bfa7abe-7e46-4d8f-b50a-4d0c4509e4dc", "external-id": "nsx-vlan-transportzone-951", "segmentation_id": 951, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5cb1869-ac", "ovs_interfaceid": "b5cb1869-ace8-44cb-bd59-60e4ce4e95ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1764.998885] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:77:50:93', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bfa7abe-7e46-4d8f-b50a-4d0c4509e4dc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b5cb1869-ace8-44cb-bd59-60e4ce4e95ad', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1765.007902] env[62684]: DEBUG oslo.service.loopingcall [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1765.009432] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1765.011176] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-59627a1d-aa4c-4908-890e-0fedde0088e5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.035951] env[62684]: DEBUG oslo_vmware.api [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Task: {'id': task-2052440, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.044127] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1765.044127] env[62684]: value = "task-2052441" [ 1765.044127] env[62684]: _type = "Task" [ 1765.044127] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.052902] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523494cd-49e4-f74f-b748-7ec3113af044, 'name': SearchDatastore_Task, 'duration_secs': 0.013702} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.056448] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7393c53c-9123-4a41-b927-fbcc82d5d29e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.060626] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052441, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.064982] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Waiting for the task: (returnval){ [ 1765.064982] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5295f08f-9cb8-39f5-f5b7-ac7ab4415040" [ 1765.064982] env[62684]: _type = "Task" [ 1765.064982] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.083227] env[62684]: DEBUG oslo_concurrency.lockutils [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "e08f8636-5193-40fa-972c-f0ecab193fc1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1765.083558] env[62684]: DEBUG oslo_concurrency.lockutils [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "e08f8636-5193-40fa-972c-f0ecab193fc1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1765.083862] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5295f08f-9cb8-39f5-f5b7-ac7ab4415040, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.321077] env[62684]: DEBUG nova.compute.manager [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1765.341553] env[62684]: DEBUG oslo_vmware.api [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052439, 'name': ReconfigVM_Task, 'duration_secs': 0.470619} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.342090] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Reconfigured VM instance instance-00000010 to attach disk [datastore1] volume-34523d13-ed90-416e-a19a-57c837136d21/volume-34523d13-ed90-416e-a19a-57c837136d21.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1765.350900] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6fdded77-a8cf-4a1e-b96f-05d316020ffa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.382023] env[62684]: DEBUG oslo_vmware.api [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Waiting for the task: (returnval){ [ 1765.382023] env[62684]: value = "task-2052442" [ 1765.382023] env[62684]: _type = "Task" [ 1765.382023] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.393091] env[62684]: DEBUG oslo_vmware.api [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052442, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.442496] env[62684]: INFO nova.compute.manager [-] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Took 1.55 seconds to deallocate network for instance. [ 1765.454604] env[62684]: DEBUG oslo_concurrency.lockutils [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "refresh_cache-0676806b-c1f0-4c1a-a12d-add2edf1588f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1765.454935] env[62684]: DEBUG oslo_concurrency.lockutils [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquired lock "refresh_cache-0676806b-c1f0-4c1a-a12d-add2edf1588f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1765.455270] env[62684]: DEBUG nova.network.neutron [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1765.506186] env[62684]: DEBUG oslo_vmware.api [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Task: {'id': task-2052440, 'name': PowerOffVM_Task, 'duration_secs': 0.299922} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.507377] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1765.507655] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1765.509280] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5625aacb-43df-4df4-928e-02b11478cf59 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.558014] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052441, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.578821] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5295f08f-9cb8-39f5-f5b7-ac7ab4415040, 'name': SearchDatastore_Task, 'duration_secs': 0.013188} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.581153] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1765.581153] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] aec16a15-5d75-4ea6-800b-1bf67f762d89/aec16a15-5d75-4ea6-800b-1bf67f762d89.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1765.583323] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1b16e860-ce76-4bd5-831c-31d39c7e40cb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.587617] env[62684]: DEBUG oslo_concurrency.lockutils [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Acquiring lock "b788c51b-367b-4eef-93d2-faa8836469b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1765.588047] env[62684]: DEBUG oslo_concurrency.lockutils 
[None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Lock "b788c51b-367b-4eef-93d2-faa8836469b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1765.595032] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Waiting for the task: (returnval){ [ 1765.595032] env[62684]: value = "task-2052444" [ 1765.595032] env[62684]: _type = "Task" [ 1765.595032] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.607045] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Task: {'id': task-2052444, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.623822] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1765.624105] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1765.624339] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Deleting the datastore file [datastore2] 43d28811-26e4-4016-9f82-98349d4a05b7 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1765.624626] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca071db5-4b72-44f7-9024-2bb53d30baca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.633409] env[62684]: DEBUG oslo_vmware.api [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Waiting for the task: (returnval){ [ 1765.633409] env[62684]: value = "task-2052445" [ 1765.633409] env[62684]: _type = "Task" [ 1765.633409] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.645019] env[62684]: DEBUG oslo_vmware.api [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Task: {'id': task-2052445, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.853345] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1765.880011] env[62684]: DEBUG nova.compute.manager [req-ea6edb22-3c7c-4f22-988b-aa22edcb214f req-3dd78410-9e95-4fd3-a98c-26d9399936d1 service nova] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Received event network-changed-d94a0e39-627d-4191-9011-76da63ed1d8f {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1765.880011] env[62684]: DEBUG nova.compute.manager [req-ea6edb22-3c7c-4f22-988b-aa22edcb214f req-3dd78410-9e95-4fd3-a98c-26d9399936d1 service nova] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Refreshing instance network info cache due to event network-changed-d94a0e39-627d-4191-9011-76da63ed1d8f. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1765.880011] env[62684]: DEBUG oslo_concurrency.lockutils [req-ea6edb22-3c7c-4f22-988b-aa22edcb214f req-3dd78410-9e95-4fd3-a98c-26d9399936d1 service nova] Acquiring lock "refresh_cache-aec16a15-5d75-4ea6-800b-1bf67f762d89" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1765.880444] env[62684]: DEBUG oslo_concurrency.lockutils [req-ea6edb22-3c7c-4f22-988b-aa22edcb214f req-3dd78410-9e95-4fd3-a98c-26d9399936d1 service nova] Acquired lock "refresh_cache-aec16a15-5d75-4ea6-800b-1bf67f762d89" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1765.880667] env[62684]: DEBUG nova.network.neutron [req-ea6edb22-3c7c-4f22-988b-aa22edcb214f req-3dd78410-9e95-4fd3-a98c-26d9399936d1 service nova] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Refreshing network info cache for port d94a0e39-627d-4191-9011-76da63ed1d8f {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1765.895790] env[62684]: DEBUG oslo_vmware.api [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052442, 'name': ReconfigVM_Task, 'duration_secs': 0.216481} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.897051] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421128', 'volume_id': '34523d13-ed90-416e-a19a-57c837136d21', 'name': 'volume-34523d13-ed90-416e-a19a-57c837136d21', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5bc73032-45f9-4b5c-a4ea-e07c48e4f82b', 'attached_at': '', 'detached_at': '', 'volume_id': '34523d13-ed90-416e-a19a-57c837136d21', 'serial': '34523d13-ed90-416e-a19a-57c837136d21'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1765.898188] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9df0a874-45fb-4e80-8d95-c1629b64187b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.908466] env[62684]: DEBUG oslo_vmware.api [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Waiting for the task: (returnval){ [ 1765.908466] env[62684]: value = "task-2052446" [ 1765.908466] env[62684]: _type = "Task" [ 1765.908466] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.925622] env[62684]: DEBUG oslo_vmware.api [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052446, 'name': Rename_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.952990] env[62684]: DEBUG oslo_concurrency.lockutils [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1765.993989] env[62684]: DEBUG nova.compute.manager [req-96ad1f89-dc75-47ca-9aec-3491a19de632 req-07d00fe7-aa87-49ab-9879-caab38ebed9d service nova] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Received event network-vif-plugged-b5cb1869-ace8-44cb-bd59-60e4ce4e95ad {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1765.997827] env[62684]: DEBUG oslo_concurrency.lockutils [req-96ad1f89-dc75-47ca-9aec-3491a19de632 req-07d00fe7-aa87-49ab-9879-caab38ebed9d service nova] Acquiring lock "06751c34-0724-44ba-a263-ad27fcf2920f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1765.998363] env[62684]: DEBUG oslo_concurrency.lockutils [req-96ad1f89-dc75-47ca-9aec-3491a19de632 req-07d00fe7-aa87-49ab-9879-caab38ebed9d service nova] Lock "06751c34-0724-44ba-a263-ad27fcf2920f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1765.998468] env[62684]: DEBUG oslo_concurrency.lockutils [req-96ad1f89-dc75-47ca-9aec-3491a19de632 req-07d00fe7-aa87-49ab-9879-caab38ebed9d service nova] Lock "06751c34-0724-44ba-a263-ad27fcf2920f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1765.998682] env[62684]: DEBUG nova.compute.manager [req-96ad1f89-dc75-47ca-9aec-3491a19de632 req-07d00fe7-aa87-49ab-9879-caab38ebed9d service nova] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] No waiting events found dispatching network-vif-plugged-b5cb1869-ace8-44cb-bd59-60e4ce4e95ad {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1765.999408] env[62684]: WARNING nova.compute.manager [req-96ad1f89-dc75-47ca-9aec-3491a19de632 req-07d00fe7-aa87-49ab-9879-caab38ebed9d service nova] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Received unexpected event network-vif-plugged-b5cb1869-ace8-44cb-bd59-60e4ce4e95ad for instance with vm_state building and task_state spawning. [ 1765.999732] env[62684]: DEBUG nova.compute.manager [req-96ad1f89-dc75-47ca-9aec-3491a19de632 req-07d00fe7-aa87-49ab-9879-caab38ebed9d service nova] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Received event network-changed-b5cb1869-ace8-44cb-bd59-60e4ce4e95ad {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1765.999979] env[62684]: DEBUG nova.compute.manager [req-96ad1f89-dc75-47ca-9aec-3491a19de632 req-07d00fe7-aa87-49ab-9879-caab38ebed9d service nova] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Refreshing instance network info cache due to event network-changed-b5cb1869-ace8-44cb-bd59-60e4ce4e95ad. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1766.000280] env[62684]: DEBUG oslo_concurrency.lockutils [req-96ad1f89-dc75-47ca-9aec-3491a19de632 req-07d00fe7-aa87-49ab-9879-caab38ebed9d service nova] Acquiring lock "refresh_cache-06751c34-0724-44ba-a263-ad27fcf2920f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1766.000478] env[62684]: DEBUG oslo_concurrency.lockutils [req-96ad1f89-dc75-47ca-9aec-3491a19de632 req-07d00fe7-aa87-49ab-9879-caab38ebed9d service nova] Acquired lock "refresh_cache-06751c34-0724-44ba-a263-ad27fcf2920f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1766.000689] env[62684]: DEBUG nova.network.neutron [req-96ad1f89-dc75-47ca-9aec-3491a19de632 req-07d00fe7-aa87-49ab-9879-caab38ebed9d service nova] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Refreshing network info cache for port b5cb1869-ace8-44cb-bd59-60e4ce4e95ad {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1766.059026] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052441, 'name': CreateVM_Task, 'duration_secs': 0.588585} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.059026] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1766.059850] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1766.060236] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1766.060994] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1766.064601] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fcedc675-b4aa-4fb4-8472-818a364f16a9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.075132] env[62684]: DEBUG oslo_vmware.api [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1766.075132] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5286487a-829d-c114-3091-b799e1226ea0" [ 1766.075132] env[62684]: _type = "Task" [ 1766.075132] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.089814] env[62684]: DEBUG oslo_vmware.api [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5286487a-829d-c114-3091-b799e1226ea0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.108906] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Task: {'id': task-2052444, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.148329] env[62684]: DEBUG oslo_vmware.api [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Task: {'id': task-2052445, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.276086} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.148731] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1766.149270] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1766.149594] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1766.149904] env[62684]: INFO nova.compute.manager [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1766.150511] env[62684]: DEBUG oslo.service.loopingcall [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1766.153956] env[62684]: DEBUG nova.compute.manager [-] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1766.154122] env[62684]: DEBUG nova.network.neutron [-] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1766.253465] env[62684]: DEBUG nova.network.neutron [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Updating instance_info_cache with network_info: [{"id": "10023d3d-f0cd-49c9-984f-fb3f2af83e3b", "address": "fa:16:3e:2d:b8:1b", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.60", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10023d3d-f0", "ovs_interfaceid": "10023d3d-f0cd-49c9-984f-fb3f2af83e3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1766.421273] env[62684]: DEBUG oslo_vmware.api [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052446, 'name': Rename_Task, 'duration_secs': 0.419436} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.421715] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1766.425602] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-600aa6d4-4971-4b23-a149-67c35055468f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.435730] env[62684]: DEBUG oslo_vmware.api [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Waiting for the task: (returnval){ [ 1766.435730] env[62684]: value = "task-2052447" [ 1766.435730] env[62684]: _type = "Task" [ 1766.435730] env[62684]: } to complete. 
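Aside on the "Powering on the VM" / "Waiting for the task" / "progress is N%" / "completed successfully" sequence: this is oslo.vmware's task-polling loop. A minimal sketch of how a caller drives such a vCenter task, assuming a session object that behaves like oslo.vmware's VMwareAPISession (a vim client plus invoke_api() and wait_for_task()); vm_ref stands in for a managed-object reference obtained elsewhere:

    from oslo_vmware import exceptions as vexc

    def power_on(session, vm_ref):
        # PowerOnVM_Task returns a task reference immediately; the real
        # work happens asynchronously on the vCenter side.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        try:
            # Blocks, polling the task until it reaches "success" (the
            # "completed successfully" records) or raises on "error".
            return session.wait_for_task(task)
        except vexc.VimException:
            # Failures surface here rather than in the progress records.
            raise
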
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.449309] env[62684]: DEBUG oslo_vmware.api [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052447, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.573871] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acb8a775-f8d6-4a54-99b0-44a54706dfc1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.592490] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f09ca1ff-3ecc-4768-9409-19ddd07e3d0a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.596964] env[62684]: DEBUG oslo_vmware.api [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5286487a-829d-c114-3091-b799e1226ea0, 'name': SearchDatastore_Task, 'duration_secs': 0.060834} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.597422] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1766.597987] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1766.597987] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1766.598149] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1766.598305] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1766.601988] env[62684]: DEBUG oslo_vmware.service [-] 
Invoking FileManager.MakeDirectory with opID=oslo.vmware-38b14385-89cf-425e-a2d7-0de6f2be118d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.645812] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3eb80c7-b431-4727-a795-dba713a7947c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.654790] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Task: {'id': task-2052444, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.657467} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.655109] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1766.655382] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1766.656962] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] aec16a15-5d75-4ea6-800b-1bf67f762d89/aec16a15-5d75-4ea6-800b-1bf67f762d89.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1766.657204] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1766.657472] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3544a270-3328-4794-ba36-a41560a70999 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.662031] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3ab275bc-61b7-4fd4-b8ac-7e757d87ed21 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.666640] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9868c1f5-4db6-428f-b17f-1b37e34a2695 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.672343] env[62684]: DEBUG oslo_vmware.api [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1766.672343] env[62684]: value = 
"session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520cd868-6b3f-f43e-a5bc-0360f826bc77" [ 1766.672343] env[62684]: _type = "Task" [ 1766.672343] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.684855] env[62684]: DEBUG nova.compute.provider_tree [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1766.690862] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Waiting for the task: (returnval){ [ 1766.690862] env[62684]: value = "task-2052448" [ 1766.690862] env[62684]: _type = "Task" [ 1766.690862] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.701715] env[62684]: DEBUG oslo_vmware.api [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520cd868-6b3f-f43e-a5bc-0360f826bc77, 'name': SearchDatastore_Task, 'duration_secs': 0.017714} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.703714] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5da1e2b6-f232-4c4d-a075-e9531aa11ccb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.711709] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Task: {'id': task-2052448, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.715396] env[62684]: DEBUG oslo_vmware.api [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1766.715396] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52591c7e-cdc0-8e04-7a8d-9a850bda8e86" [ 1766.715396] env[62684]: _type = "Task" [ 1766.715396] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.727394] env[62684]: DEBUG oslo_vmware.api [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52591c7e-cdc0-8e04-7a8d-9a850bda8e86, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.757065] env[62684]: DEBUG oslo_concurrency.lockutils [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Releasing lock "refresh_cache-0676806b-c1f0-4c1a-a12d-add2edf1588f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1766.911017] env[62684]: DEBUG nova.network.neutron [req-ea6edb22-3c7c-4f22-988b-aa22edcb214f req-3dd78410-9e95-4fd3-a98c-26d9399936d1 service nova] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Updated VIF entry in instance network info cache for port d94a0e39-627d-4191-9011-76da63ed1d8f. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1766.911252] env[62684]: DEBUG nova.network.neutron [req-ea6edb22-3c7c-4f22-988b-aa22edcb214f req-3dd78410-9e95-4fd3-a98c-26d9399936d1 service nova] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Updating instance_info_cache with network_info: [{"id": "d94a0e39-627d-4191-9011-76da63ed1d8f", "address": "fa:16:3e:2b:4b:d0", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.143", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd94a0e39-62", "ovs_interfaceid": "d94a0e39-627d-4191-9011-76da63ed1d8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1766.954464] env[62684]: DEBUG oslo_vmware.api [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052447, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.012792] env[62684]: DEBUG nova.network.neutron [req-96ad1f89-dc75-47ca-9aec-3491a19de632 req-07d00fe7-aa87-49ab-9879-caab38ebed9d service nova] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Updated VIF entry in instance network info cache for port b5cb1869-ace8-44cb-bd59-60e4ce4e95ad. 
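Aside on the network_info blob cached above: it is plain JSON-serializable data, so the fields that usually matter when debugging (MAC, fixed IP, tap device, segmentation ID) can be picked out with ordinary dict access. The literal below is abbreviated from the d94a0e39 entry in the record above:

    vif = {
        "id": "d94a0e39-627d-4191-9011-76da63ed1d8f",
        "address": "fa:16:3e:2b:4b:d0",
        "devname": "tapd94a0e39-62",
        "network": {"subnets": [{"ips": [{"address": "192.168.233.143"}]}]},
        "details": {"segmentation_id": 293},
    }

    mac = vif["address"]
    fixed_ip = vif["network"]["subnets"][0]["ips"][0]["address"]
    print(mac, fixed_ip, vif["devname"], vif["details"]["segmentation_id"])
    # fa:16:3e:2b:4b:d0 192.168.233.143 tapd94a0e39-62 293
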
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1767.012792] env[62684]: DEBUG nova.network.neutron [req-96ad1f89-dc75-47ca-9aec-3491a19de632 req-07d00fe7-aa87-49ab-9879-caab38ebed9d service nova] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Updating instance_info_cache with network_info: [{"id": "b5cb1869-ace8-44cb-bd59-60e4ce4e95ad", "address": "fa:16:3e:77:50:93", "network": {"id": "bca0ee43-bbb1-483b-9d82-56955369f9b7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1592250106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aef5d7061c834332b9f9c5c75596bf08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bfa7abe-7e46-4d8f-b50a-4d0c4509e4dc", "external-id": "nsx-vlan-transportzone-951", "segmentation_id": 951, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5cb1869-ac", "ovs_interfaceid": "b5cb1869-ace8-44cb-bd59-60e4ce4e95ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1767.195017] env[62684]: DEBUG nova.scheduler.client.report [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1767.214015] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Task: {'id': task-2052448, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.464726} completed successfully. 
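Aside on the inventory data reported above for provider c23c281e: it maps directly onto Placement's capacity formula, capacity = (total - reserved) * allocation_ratio. Restating the logged values in a few lines makes the effective headroom explicit:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
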
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.215351] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1767.216214] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6852655-0c63-4ba9-8bf0-35ec3ac58f33 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.232569] env[62684]: DEBUG oslo_vmware.api [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52591c7e-cdc0-8e04-7a8d-9a850bda8e86, 'name': SearchDatastore_Task, 'duration_secs': 0.021236} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.243297] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1767.243297] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 06751c34-0724-44ba-a263-ad27fcf2920f/06751c34-0724-44ba-a263-ad27fcf2920f.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1767.252373] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] aec16a15-5d75-4ea6-800b-1bf67f762d89/aec16a15-5d75-4ea6-800b-1bf67f762d89.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1767.253788] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-60a8d582-dfc4-4873-ada7-5fa0d712915a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.258020] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c913cdcf-58d0-441d-ad81-c60ff075373e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.285955] env[62684]: DEBUG oslo_vmware.api [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1767.285955] env[62684]: value = "task-2052449" [ 1767.285955] env[62684]: _type = "Task" [ 
1767.285955] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.287136] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Waiting for the task: (returnval){ [ 1767.287136] env[62684]: value = "task-2052450" [ 1767.287136] env[62684]: _type = "Task" [ 1767.287136] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.302669] env[62684]: DEBUG oslo_vmware.api [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052449, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.310923] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Task: {'id': task-2052450, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.402100] env[62684]: DEBUG nova.network.neutron [-] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1767.416831] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Acquiring lock "d06f3099-d05f-417f-a71a-7b368590624f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1767.417107] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Lock "d06f3099-d05f-417f-a71a-7b368590624f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1767.419245] env[62684]: DEBUG oslo_concurrency.lockutils [req-ea6edb22-3c7c-4f22-988b-aa22edcb214f req-3dd78410-9e95-4fd3-a98c-26d9399936d1 service nova] Releasing lock "refresh_cache-aec16a15-5d75-4ea6-800b-1bf67f762d89" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1767.481386] env[62684]: DEBUG oslo_vmware.api [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052447, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.514888] env[62684]: DEBUG oslo_concurrency.lockutils [req-96ad1f89-dc75-47ca-9aec-3491a19de632 req-07d00fe7-aa87-49ab-9879-caab38ebed9d service nova] Releasing lock "refresh_cache-06751c34-0724-44ba-a263-ad27fcf2920f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1767.514888] env[62684]: DEBUG nova.compute.manager [req-96ad1f89-dc75-47ca-9aec-3491a19de632 req-07d00fe7-aa87-49ab-9879-caab38ebed9d service nova] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Received event network-vif-deleted-fafc337e-8380-4431-acaa-5ab65e6b32d7 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1767.707383] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.794s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1767.708071] env[62684]: DEBUG nova.compute.manager [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1767.711634] env[62684]: DEBUG oslo_concurrency.lockutils [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.978s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1767.713837] env[62684]: INFO nova.compute.claims [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1767.808982] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Task: {'id': task-2052450, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.812469] env[62684]: DEBUG oslo_vmware.api [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052449, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.906118] env[62684]: INFO nova.compute.manager [-] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Took 1.75 seconds to deallocate network for instance. [ 1767.951616] env[62684]: DEBUG oslo_vmware.api [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052447, 'name': PowerOnVM_Task} progress is 19%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.219558] env[62684]: DEBUG nova.compute.utils [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1768.226513] env[62684]: DEBUG nova.compute.manager [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1768.226823] env[62684]: DEBUG nova.network.neutron [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1768.289054] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bfba233-0d6f-4844-bc12-b3b51daa9d14 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.293469] env[62684]: DEBUG nova.policy [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '682887a29e454c4aa19d037af2f969e7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '12b5d28eab2e49989d1e2f1a7e523eff', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1768.324432] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Updating instance '0676806b-c1f0-4c1a-a12d-add2edf1588f' progress to 0 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1768.333612] env[62684]: DEBUG oslo_vmware.api [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052449, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.642801} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.334009] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 06751c34-0724-44ba-a263-ad27fcf2920f/06751c34-0724-44ba-a263-ad27fcf2920f.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1768.334262] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1768.334535] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-314453b4-8e2c-48c1-98a7-845801a3cd41 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.344479] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Task: {'id': task-2052450, 'name': ReconfigVM_Task, 'duration_secs': 0.737197} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.344479] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Reconfigured VM instance instance-00000011 to attach disk [datastore1] aec16a15-5d75-4ea6-800b-1bf67f762d89/aec16a15-5d75-4ea6-800b-1bf67f762d89.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1768.344746] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ea4c20a7-6592-4a02-ae18-44d136a49217 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.350021] env[62684]: DEBUG oslo_vmware.api [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1768.350021] env[62684]: value = "task-2052451" [ 1768.350021] env[62684]: _type = "Task" [ 1768.350021] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1768.357214] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Waiting for the task: (returnval){ [ 1768.357214] env[62684]: value = "task-2052452" [ 1768.357214] env[62684]: _type = "Task" [ 1768.357214] env[62684]: } to complete. 
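Aside on the "Extending root virtual disk to 1048576" records: the value reads as 1 GiB expressed in KiB, which matches the 1 GB root disks of the tiny flavors these tempest tests use (the flavor records further down show root_gb=1). A one-liner with oslo.utils makes the conversion explicit, assuming the requested size really is in KiB:

    from oslo_utils import units

    root_gb = 1                              # flavor root disk size in GB
    requested_size_kb = root_gb * units.Mi   # 1 GiB expressed in KiB
    print(requested_size_kb)                 # 1048576, matching the records
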
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1768.367955] env[62684]: DEBUG oslo_vmware.api [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052451, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.376996] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Task: {'id': task-2052452, 'name': Rename_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.411656] env[62684]: DEBUG nova.compute.manager [req-3e90db76-1905-4a14-a9d3-f276e2042cad req-8d2d2e43-d36c-485c-9b10-0296e3e5a4b4 service nova] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Received event network-vif-deleted-200d8b57-0aad-430f-8a16-63f7ce3d1668 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1768.418024] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1768.450678] env[62684]: DEBUG oslo_vmware.api [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052447, 'name': PowerOnVM_Task} progress is 82%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.723593] env[62684]: DEBUG nova.network.neutron [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Successfully created port: de8de653-ec88-4a72-840c-27978f584581 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1768.726863] env[62684]: DEBUG nova.compute.manager [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1768.836574] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1768.836949] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1bdc83c7-62b9-4785-8b78-2df776351e3a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.862032] env[62684]: DEBUG oslo_vmware.api [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 1768.862032] env[62684]: value = "task-2052453" [ 1768.862032] env[62684]: _type = "Task" [ 1768.862032] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1768.873140] env[62684]: DEBUG oslo_vmware.api [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052451, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.202177} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.874030] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1768.875214] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc80f114-00f7-48fd-8214-865f0ce6ad50 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.891432] env[62684]: DEBUG oslo_vmware.api [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052453, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.891735] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Task: {'id': task-2052452, 'name': Rename_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.919118] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] 06751c34-0724-44ba-a263-ad27fcf2920f/06751c34-0724-44ba-a263-ad27fcf2920f.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1768.923435] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3fa377d2-79c6-4c59-aba3-4dfa57078267 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.949090] env[62684]: DEBUG oslo_vmware.api [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1768.949090] env[62684]: value = "task-2052454" [ 1768.949090] env[62684]: _type = "Task" [ 1768.949090] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1768.953184] env[62684]: DEBUG oslo_vmware.api [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052447, 'name': PowerOnVM_Task, 'duration_secs': 2.371315} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.957485] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1768.957694] env[62684]: INFO nova.compute.manager [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Took 11.15 seconds to spawn the instance on the hypervisor. [ 1768.958097] env[62684]: DEBUG nova.compute.manager [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1768.963313] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e22177-b372-40ce-91b0-d51773c6dca7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.974399] env[62684]: DEBUG oslo_vmware.api [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052454, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.352472] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae38912c-7eec-4b51-adba-b0f248b74fa5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.368908] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4bb6c4c-3364-4b4d-86d4-a5f14a3380df {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.376699] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Task: {'id': task-2052452, 'name': Rename_Task, 'duration_secs': 0.856246} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1769.377286] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1769.377593] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a4b26ac8-3170-48ad-bbb9-a81aa06db6af {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.406306] env[62684]: DEBUG oslo_vmware.api [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052453, 'name': PowerOffVM_Task, 'duration_secs': 0.34639} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1769.408044] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1769.408148] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Updating instance '0676806b-c1f0-4c1a-a12d-add2edf1588f' progress to 17 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1769.411974] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f723ef9f-bdf5-410c-a70b-fbb5839473e4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.416438] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Waiting for the task: (returnval){ [ 1769.416438] env[62684]: value = "task-2052455" [ 1769.416438] env[62684]: _type = "Task" [ 1769.416438] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.423614] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72cf9abc-c45c-4f13-a0a0-f083dec35a3b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.432803] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Task: {'id': task-2052455, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.444880] env[62684]: DEBUG nova.compute.provider_tree [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1769.467299] env[62684]: DEBUG oslo_vmware.api [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052454, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.494910] env[62684]: INFO nova.compute.manager [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Took 32.30 seconds to build instance. [ 1769.496954] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Acquiring lock "e2a9ab56-bde3-40b6-a214-19c77a9c6778" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1769.497349] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Lock "e2a9ab56-bde3-40b6-a214-19c77a9c6778" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1769.742940] env[62684]: DEBUG nova.compute.manager [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1769.773785] env[62684]: DEBUG nova.virt.hardware [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1769.774078] env[62684]: DEBUG nova.virt.hardware [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1769.774241] env[62684]: DEBUG nova.virt.hardware [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1769.774440] env[62684]: DEBUG nova.virt.hardware [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1769.774582] env[62684]: DEBUG nova.virt.hardware [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1769.774734] env[62684]: DEBUG nova.virt.hardware [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1769.774964] env[62684]: DEBUG nova.virt.hardware [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1769.775145] env[62684]: DEBUG nova.virt.hardware [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1769.775319] env[62684]: DEBUG nova.virt.hardware [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1769.775486] env[62684]: DEBUG nova.virt.hardware [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1769.775664] env[62684]: DEBUG nova.virt.hardware [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1769.777008] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25f16d51-0847-4419-adfa-e4154d4021e9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.788171] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81f93408-2f61-4364-8010-daa32eed0556 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.918920] env[62684]: DEBUG nova.virt.hardware [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1769.919315] env[62684]: DEBUG nova.virt.hardware [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1769.919618] env[62684]: DEBUG nova.virt.hardware [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1769.919618] env[62684]: DEBUG nova.virt.hardware [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1769.919767] env[62684]: DEBUG nova.virt.hardware [None 
req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1769.919879] env[62684]: DEBUG nova.virt.hardware [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1769.920166] env[62684]: DEBUG nova.virt.hardware [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1769.920305] env[62684]: DEBUG nova.virt.hardware [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1769.920486] env[62684]: DEBUG nova.virt.hardware [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1769.920681] env[62684]: DEBUG nova.virt.hardware [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1769.921282] env[62684]: DEBUG nova.virt.hardware [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1769.926862] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d143de50-4666-4a1c-a8a2-60bfaabeaa5c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.950975] env[62684]: DEBUG nova.scheduler.client.report [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1769.954414] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Task: 
{'id': task-2052455, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.956182] env[62684]: DEBUG oslo_vmware.api [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 1769.956182] env[62684]: value = "task-2052456" [ 1769.956182] env[62684]: _type = "Task" [ 1769.956182] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.969232] env[62684]: DEBUG oslo_vmware.api [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052456, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.974733] env[62684]: DEBUG oslo_vmware.api [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052454, 'name': ReconfigVM_Task, 'duration_secs': 0.695513} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1769.975502] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Reconfigured VM instance instance-00000012 to attach disk [datastore1] 06751c34-0724-44ba-a263-ad27fcf2920f/06751c34-0724-44ba-a263-ad27fcf2920f.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1769.976244] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cab66258-f9b1-44a3-a79e-67ba52fcf38e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.985840] env[62684]: DEBUG oslo_vmware.api [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1769.985840] env[62684]: value = "task-2052457" [ 1769.985840] env[62684]: _type = "Task" [ 1769.985840] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.996782] env[62684]: DEBUG oslo_vmware.api [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052457, 'name': Rename_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.998427] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6bf4b825-adea-464e-b503-4c4157e06208 tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Lock "5bc73032-45f9-4b5c-a4ea-e07c48e4f82b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.707s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1770.439497] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Task: {'id': task-2052455, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.458611] env[62684]: DEBUG oslo_concurrency.lockutils [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.747s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1770.459233] env[62684]: DEBUG nova.compute.manager [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1770.463497] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.684s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1770.463497] env[62684]: DEBUG nova.objects.instance [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Lazy-loading 'resources' on Instance uuid c1580c72-9345-436e-b4f7-56d319248864 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1770.477429] env[62684]: DEBUG oslo_vmware.api [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052456, 'name': ReconfigVM_Task, 'duration_secs': 0.226649} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.477680] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Updating instance '0676806b-c1f0-4c1a-a12d-add2edf1588f' progress to 33 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1770.494898] env[62684]: DEBUG oslo_vmware.api [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052457, 'name': Rename_Task, 'duration_secs': 0.238179} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.495516] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1770.495875] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e591aa30-a022-4032-ac4b-660f8b7cb85b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.501964] env[62684]: DEBUG nova.compute.manager [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1770.505834] env[62684]: DEBUG oslo_vmware.api [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1770.505834] env[62684]: value = "task-2052458" [ 1770.505834] env[62684]: _type = "Task" [ 1770.505834] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.516236] env[62684]: DEBUG oslo_vmware.api [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052458, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.865958] env[62684]: DEBUG nova.compute.manager [req-d0fa0cbb-a73f-4648-850b-d4bfcf3c8c42 req-50d0c3e3-bc34-4a40-9e32-cc98b3d27217 service nova] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Received event network-vif-plugged-de8de653-ec88-4a72-840c-27978f584581 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1770.866234] env[62684]: DEBUG oslo_concurrency.lockutils [req-d0fa0cbb-a73f-4648-850b-d4bfcf3c8c42 req-50d0c3e3-bc34-4a40-9e32-cc98b3d27217 service nova] Acquiring lock "0dbd52ac-c987-4728-974e-73e99465c5e7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1770.866428] env[62684]: DEBUG oslo_concurrency.lockutils [req-d0fa0cbb-a73f-4648-850b-d4bfcf3c8c42 req-50d0c3e3-bc34-4a40-9e32-cc98b3d27217 service nova] Lock "0dbd52ac-c987-4728-974e-73e99465c5e7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1770.866824] env[62684]: DEBUG oslo_concurrency.lockutils [req-d0fa0cbb-a73f-4648-850b-d4bfcf3c8c42 req-50d0c3e3-bc34-4a40-9e32-cc98b3d27217 service nova] Lock "0dbd52ac-c987-4728-974e-73e99465c5e7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1770.866824] env[62684]: DEBUG nova.compute.manager [req-d0fa0cbb-a73f-4648-850b-d4bfcf3c8c42 req-50d0c3e3-bc34-4a40-9e32-cc98b3d27217 service nova] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] No waiting events found dispatching network-vif-plugged-de8de653-ec88-4a72-840c-27978f584581 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1770.866932] env[62684]: WARNING nova.compute.manager [req-d0fa0cbb-a73f-4648-850b-d4bfcf3c8c42 req-50d0c3e3-bc34-4a40-9e32-cc98b3d27217 service nova] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Received unexpected event network-vif-plugged-de8de653-ec88-4a72-840c-27978f584581 for instance with vm_state building and task_state spawning. [ 1770.941466] env[62684]: DEBUG oslo_vmware.api [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Task: {'id': task-2052455, 'name': PowerOnVM_Task, 'duration_secs': 1.249967} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.941803] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1770.942022] env[62684]: INFO nova.compute.manager [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Took 11.99 seconds to spawn the instance on the hypervisor. 
[ 1770.942371] env[62684]: DEBUG nova.compute.manager [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1770.943096] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-297c6349-3834-43fc-91a2-dd265dbcb2bd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.971872] env[62684]: DEBUG nova.compute.utils [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1770.971872] env[62684]: DEBUG nova.compute.manager [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1770.971971] env[62684]: DEBUG nova.network.neutron [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1770.974630] env[62684]: DEBUG nova.network.neutron [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Successfully updated port: de8de653-ec88-4a72-840c-27978f584581 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1770.985192] env[62684]: DEBUG nova.virt.hardware [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1770.985192] env[62684]: DEBUG nova.virt.hardware [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1770.985192] env[62684]: DEBUG nova.virt.hardware [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 1770.985192] env[62684]: DEBUG nova.virt.hardware [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1770.985454] env[62684]: DEBUG nova.virt.hardware [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1770.985454] env[62684]: DEBUG nova.virt.hardware [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1770.985454] env[62684]: DEBUG nova.virt.hardware [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1770.985454] env[62684]: DEBUG nova.virt.hardware [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1770.986844] env[62684]: DEBUG nova.virt.hardware [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1770.986844] env[62684]: DEBUG nova.virt.hardware [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1770.986844] env[62684]: DEBUG nova.virt.hardware [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1770.994204] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Reconfiguring VM instance instance-00000006 to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1770.994373] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f50c1a85-ba95-49fe-8b49-486995c5f26a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.028465] env[62684]: DEBUG oslo_vmware.api [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 
tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 1771.028465] env[62684]: value = "task-2052459" [ 1771.028465] env[62684]: _type = "Task" [ 1771.028465] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.034010] env[62684]: DEBUG oslo_vmware.api [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052458, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.038222] env[62684]: DEBUG oslo_concurrency.lockutils [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1771.045228] env[62684]: DEBUG oslo_vmware.api [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052459, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.062997] env[62684]: DEBUG nova.policy [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c7cf299748e54a5797117ee8ff1695df', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98ce92c22eff45fdbd73acff31aca8dd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1771.102024] env[62684]: DEBUG nova.compute.manager [req-68728536-0fce-4f0e-b24e-5c07d2c90a74 req-39fd0df7-9a0a-47b1-9a3f-0156be3112ab service nova] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Received event network-changed-c0047526-de96-4c14-8230-e69c53c790af {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1771.102024] env[62684]: DEBUG nova.compute.manager [req-68728536-0fce-4f0e-b24e-5c07d2c90a74 req-39fd0df7-9a0a-47b1-9a3f-0156be3112ab service nova] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Refreshing instance network info cache due to event network-changed-c0047526-de96-4c14-8230-e69c53c790af. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1771.102138] env[62684]: DEBUG oslo_concurrency.lockutils [req-68728536-0fce-4f0e-b24e-5c07d2c90a74 req-39fd0df7-9a0a-47b1-9a3f-0156be3112ab service nova] Acquiring lock "refresh_cache-5bc73032-45f9-4b5c-a4ea-e07c48e4f82b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1771.102311] env[62684]: DEBUG oslo_concurrency.lockutils [req-68728536-0fce-4f0e-b24e-5c07d2c90a74 req-39fd0df7-9a0a-47b1-9a3f-0156be3112ab service nova] Acquired lock "refresh_cache-5bc73032-45f9-4b5c-a4ea-e07c48e4f82b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1771.102475] env[62684]: DEBUG nova.network.neutron [req-68728536-0fce-4f0e-b24e-5c07d2c90a74 req-39fd0df7-9a0a-47b1-9a3f-0156be3112ab service nova] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Refreshing network info cache for port c0047526-de96-4c14-8230-e69c53c790af {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1771.467198] env[62684]: INFO nova.compute.manager [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Took 33.50 seconds to build instance. [ 1771.477083] env[62684]: DEBUG nova.compute.manager [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1771.480099] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Acquiring lock "refresh_cache-0dbd52ac-c987-4728-974e-73e99465c5e7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1771.480256] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Acquired lock "refresh_cache-0dbd52ac-c987-4728-974e-73e99465c5e7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1771.480402] env[62684]: DEBUG nova.network.neutron [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1771.536925] env[62684]: DEBUG oslo_vmware.api [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052458, 'name': PowerOnVM_Task, 'duration_secs': 0.899388} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1771.537735] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1771.538118] env[62684]: INFO nova.compute.manager [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Took 9.47 seconds to spawn the instance on the hypervisor. [ 1771.538408] env[62684]: DEBUG nova.compute.manager [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1771.539195] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af140cf5-f367-476c-9138-5903a9895889 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.549132] env[62684]: DEBUG oslo_vmware.api [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052459, 'name': ReconfigVM_Task, 'duration_secs': 0.296742} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1771.552514] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Reconfigured VM instance instance-00000006 to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1771.555144] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad785ac3-dbbd-4ddf-ab34-68ff78695960 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.586552] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] 0676806b-c1f0-4c1a-a12d-add2edf1588f/0676806b-c1f0-4c1a-a12d-add2edf1588f.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1771.589788] env[62684]: DEBUG nova.network.neutron [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Successfully created port: 292689a2-e664-4a36-bbc0-9f7465f7d256 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1771.595150] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c864b4de-3dc1-4a39-baa1-c553b9f280c4 {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.621499] env[62684]: DEBUG oslo_vmware.api [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 1771.621499] env[62684]: value = "task-2052460" [ 1771.621499] env[62684]: _type = "Task" [ 1771.621499] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.637315] env[62684]: DEBUG oslo_vmware.api [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052460, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.693524] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edabd9ea-7e57-4d28-ba9b-88ec1df208e6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.706403] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69837e19-22b5-4068-adf1-730b47f67f60 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.740472] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc6a3a06-e772-4987-ab3f-2aa6da5904be {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.754293] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d151934-93fd-4e04-832b-9091af169ab6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.786115] env[62684]: DEBUG nova.compute.provider_tree [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1771.897152] env[62684]: DEBUG nova.compute.manager [None req-6d0a38d2-1624-4e1c-a5d2-099b5fafefc0 tempest-ServerDiagnosticsTest-740730874 tempest-ServerDiagnosticsTest-740730874-project-admin] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1771.902137] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cd9221f-741d-4420-a91a-5ad297d89a0c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.911840] env[62684]: INFO nova.compute.manager [None req-6d0a38d2-1624-4e1c-a5d2-099b5fafefc0 tempest-ServerDiagnosticsTest-740730874 tempest-ServerDiagnosticsTest-740730874-project-admin] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Retrieving diagnostics [ 1771.912757] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf9309dd-2756-46ab-8473-dec7acae1fb8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.970953] env[62684]: DEBUG 
oslo_concurrency.lockutils [None req-9e3b4067-ab0a-4326-b607-278d42e870cf tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Lock "aec16a15-5d75-4ea6-800b-1bf67f762d89" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.194s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1772.047351] env[62684]: DEBUG nova.network.neutron [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1772.073118] env[62684]: INFO nova.compute.manager [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Took 33.20 seconds to build instance. [ 1772.114076] env[62684]: DEBUG nova.network.neutron [req-68728536-0fce-4f0e-b24e-5c07d2c90a74 req-39fd0df7-9a0a-47b1-9a3f-0156be3112ab service nova] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Updated VIF entry in instance network info cache for port c0047526-de96-4c14-8230-e69c53c790af. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1772.114485] env[62684]: DEBUG nova.network.neutron [req-68728536-0fce-4f0e-b24e-5c07d2c90a74 req-39fd0df7-9a0a-47b1-9a3f-0156be3112ab service nova] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Updating instance_info_cache with network_info: [{"id": "c0047526-de96-4c14-8230-e69c53c790af", "address": "fa:16:3e:80:f4:14", "network": {"id": "5c74d1ef-f900-4b04-bac5-bfa28ffd8537", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-2074219577-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.214", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c873948cf2a646008a7fffc544a6a8fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ef746c57-cd18-4883-a0e9-c52937aaf41d", "external-id": "nsx-vlan-transportzone-863", "segmentation_id": 863, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0047526-de", "ovs_interfaceid": "c0047526-de96-4c14-8230-e69c53c790af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1772.133409] env[62684]: DEBUG oslo_vmware.api [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052460, 'name': ReconfigVM_Task, 'duration_secs': 0.407762} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1772.133781] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Reconfigured VM instance instance-00000006 to attach disk [datastore2] 0676806b-c1f0-4c1a-a12d-add2edf1588f/0676806b-c1f0-4c1a-a12d-add2edf1588f.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1772.134093] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Updating instance '0676806b-c1f0-4c1a-a12d-add2edf1588f' progress to 50 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1772.290546] env[62684]: DEBUG nova.scheduler.client.report [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1772.301016] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1772.364862] env[62684]: DEBUG nova.network.neutron [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Updating instance_info_cache with network_info: [{"id": "de8de653-ec88-4a72-840c-27978f584581", "address": "fa:16:3e:49:29:8e", "network": {"id": "b71fbfa9-df50-40cb-95c3-272b6a724bc9", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-984806882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12b5d28eab2e49989d1e2f1a7e523eff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde8de653-ec", "ovs_interfaceid": "de8de653-ec88-4a72-840c-27978f584581", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": 
true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1772.480818] env[62684]: DEBUG nova.compute.manager [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1772.493025] env[62684]: DEBUG nova.compute.manager [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1772.532033] env[62684]: DEBUG nova.virt.hardware [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1772.532033] env[62684]: DEBUG nova.virt.hardware [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1772.532033] env[62684]: DEBUG nova.virt.hardware [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1772.532219] env[62684]: DEBUG nova.virt.hardware [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1772.532219] env[62684]: DEBUG nova.virt.hardware [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1772.532219] env[62684]: DEBUG nova.virt.hardware [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1772.532639] env[62684]: DEBUG 
nova.virt.hardware [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1772.532938] env[62684]: DEBUG nova.virt.hardware [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1772.533253] env[62684]: DEBUG nova.virt.hardware [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1772.537166] env[62684]: DEBUG nova.virt.hardware [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1772.537166] env[62684]: DEBUG nova.virt.hardware [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1772.537166] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da3cc0af-7fb0-477e-b7e6-f1d342cc5e3e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.545442] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b80320-f786-4faa-b5ab-63eee39a0693 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.575476] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a34d5c94-fc11-4be7-abd4-8904d0a2aef1 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "06751c34-0724-44ba-a263-ad27fcf2920f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.718s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1772.617074] env[62684]: DEBUG oslo_concurrency.lockutils [req-68728536-0fce-4f0e-b24e-5c07d2c90a74 req-39fd0df7-9a0a-47b1-9a3f-0156be3112ab service nova] Releasing lock "refresh_cache-5bc73032-45f9-4b5c-a4ea-e07c48e4f82b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1772.644250] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f03d9c86-8cbe-491d-ae04-571c0f0fe086 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.666369] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c400990-c0b7-4fb0-92d7-5367ef6c4d82 {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.685194] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Updating instance '0676806b-c1f0-4c1a-a12d-add2edf1588f' progress to 67 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1772.800417] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.336s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1772.802853] env[62684]: DEBUG oslo_concurrency.lockutils [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.618s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1772.803336] env[62684]: DEBUG nova.objects.instance [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Lazy-loading 'resources' on Instance uuid 91869c00-edd0-40a8-84df-d8842d750558 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1772.831482] env[62684]: INFO nova.scheduler.client.report [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Deleted allocations for instance c1580c72-9345-436e-b4f7-56d319248864 [ 1772.869283] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Releasing lock "refresh_cache-0dbd52ac-c987-4728-974e-73e99465c5e7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1772.869642] env[62684]: DEBUG nova.compute.manager [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Instance network_info: |[{"id": "de8de653-ec88-4a72-840c-27978f584581", "address": "fa:16:3e:49:29:8e", "network": {"id": "b71fbfa9-df50-40cb-95c3-272b6a724bc9", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-984806882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12b5d28eab2e49989d1e2f1a7e523eff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 
938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde8de653-ec", "ovs_interfaceid": "de8de653-ec88-4a72-840c-27978f584581", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1772.870429] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:29:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6eb7e3e9-5cc2-40f1-a6eb-f70f06531667', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'de8de653-ec88-4a72-840c-27978f584581', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1772.885545] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Creating folder: Project (12b5d28eab2e49989d1e2f1a7e523eff). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1772.886250] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-04756c9e-7f93-44ef-b999-5c838eaed0c9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.901631] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Created folder: Project (12b5d28eab2e49989d1e2f1a7e523eff) in parent group-v421118. [ 1772.901909] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Creating folder: Instances. Parent ref: group-v421180. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1772.902124] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-78523497-3740-433c-82d6-9f781f7c8132 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.916178] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Created folder: Instances in parent group-v421180. [ 1772.917032] env[62684]: DEBUG oslo.service.loopingcall [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1772.917417] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1772.917892] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-22bd7c1a-843d-4cc5-9a82-f5f058947e0b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.941617] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1772.941617] env[62684]: value = "task-2052463" [ 1772.941617] env[62684]: _type = "Task" [ 1772.941617] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1772.952571] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052463, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.006015] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1773.078944] env[62684]: DEBUG nova.compute.manager [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1773.291429] env[62684]: DEBUG nova.network.neutron [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Port 10023d3d-f0cd-49c9-984f-fb3f2af83e3b binding to destination host cpu-1 is already ACTIVE {{(pid=62684) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1773.337822] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8d0e0996-9d89-478a-ad1e-8a3586b21ebe tempest-TenantUsagesTestJSON-1755921923 tempest-TenantUsagesTestJSON-1755921923-project-member] Lock "c1580c72-9345-436e-b4f7-56d319248864" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.965s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.451019] env[62684]: DEBUG nova.compute.manager [req-7f9b967c-d92d-4f86-b7f2-0e8a69b93486 req-5837ca18-d85b-489a-84aa-cc6546e536fd service nova] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Received event network-changed-de8de653-ec88-4a72-840c-27978f584581 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1773.451019] env[62684]: DEBUG nova.compute.manager [req-7f9b967c-d92d-4f86-b7f2-0e8a69b93486 req-5837ca18-d85b-489a-84aa-cc6546e536fd service nova] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Refreshing instance network info cache due to event network-changed-de8de653-ec88-4a72-840c-27978f584581. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1773.451019] env[62684]: DEBUG oslo_concurrency.lockutils [req-7f9b967c-d92d-4f86-b7f2-0e8a69b93486 req-5837ca18-d85b-489a-84aa-cc6546e536fd service nova] Acquiring lock "refresh_cache-0dbd52ac-c987-4728-974e-73e99465c5e7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1773.451019] env[62684]: DEBUG oslo_concurrency.lockutils [req-7f9b967c-d92d-4f86-b7f2-0e8a69b93486 req-5837ca18-d85b-489a-84aa-cc6546e536fd service nova] Acquired lock "refresh_cache-0dbd52ac-c987-4728-974e-73e99465c5e7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1773.451019] env[62684]: DEBUG nova.network.neutron [req-7f9b967c-d92d-4f86-b7f2-0e8a69b93486 req-5837ca18-d85b-489a-84aa-cc6546e536fd service nova] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Refreshing network info cache for port de8de653-ec88-4a72-840c-27978f584581 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1773.473151] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052463, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.486109] env[62684]: DEBUG nova.network.neutron [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Successfully updated port: 292689a2-e664-4a36-bbc0-9f7465f7d256 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1773.603697] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1773.669929] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Acquiring lock "aec16a15-5d75-4ea6-800b-1bf67f762d89" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1773.670807] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Lock "aec16a15-5d75-4ea6-800b-1bf67f762d89" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1773.670807] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Acquiring lock "aec16a15-5d75-4ea6-800b-1bf67f762d89-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1773.670807] env[62684]: DEBUG oslo_concurrency.lockutils [None 
req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Lock "aec16a15-5d75-4ea6-800b-1bf67f762d89-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1773.670807] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Lock "aec16a15-5d75-4ea6-800b-1bf67f762d89-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.675412] env[62684]: INFO nova.compute.manager [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Terminating instance [ 1773.677795] env[62684]: DEBUG nova.compute.manager [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1773.678016] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1773.679013] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c100fd2-d711-4d08-b261-1b03443b30c7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.688739] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1773.692609] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18fa2e97-0439-416e-b4d1-fc0d13ea2689 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.701271] env[62684]: DEBUG oslo_vmware.api [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Waiting for the task: (returnval){ [ 1773.701271] env[62684]: value = "task-2052464" [ 1773.701271] env[62684]: _type = "Task" [ 1773.701271] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1773.716658] env[62684]: DEBUG oslo_vmware.api [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Task: {'id': task-2052464, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.882611] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-108a9afb-2e00-40a8-a176-10019b4101b3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.896745] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a2a7a89-4d2e-42d9-b43e-58be90822c13 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.933502] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a330a9f-9774-474d-b0e8-02e3e7d411df {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.942816] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d27796-f1b4-4e23-b7c4-5a3809c02f64 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.964931] env[62684]: DEBUG nova.compute.provider_tree [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1773.969853] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052463, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.990157] env[62684]: DEBUG oslo_concurrency.lockutils [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Acquiring lock "refresh_cache-a4767855-0c1d-48c8-98cc-6532ff140b5c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1773.990157] env[62684]: DEBUG oslo_concurrency.lockutils [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Acquired lock "refresh_cache-a4767855-0c1d-48c8-98cc-6532ff140b5c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1773.990157] env[62684]: DEBUG nova.network.neutron [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1774.218863] env[62684]: DEBUG oslo_vmware.api [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Task: {'id': task-2052464, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.298961] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1774.301045] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1774.302184] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1774.302346] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the list of instances to heal {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1774.323865] env[62684]: DEBUG oslo_concurrency.lockutils [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "0676806b-c1f0-4c1a-a12d-add2edf1588f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.323865] env[62684]: DEBUG oslo_concurrency.lockutils [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "0676806b-c1f0-4c1a-a12d-add2edf1588f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1774.323865] env[62684]: DEBUG oslo_concurrency.lockutils [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "0676806b-c1f0-4c1a-a12d-add2edf1588f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1774.343359] env[62684]: DEBUG nova.network.neutron [req-7f9b967c-d92d-4f86-b7f2-0e8a69b93486 req-5837ca18-d85b-489a-84aa-cc6546e536fd service nova] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Updated VIF entry in instance network info cache for port de8de653-ec88-4a72-840c-27978f584581. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1774.343746] env[62684]: DEBUG nova.network.neutron [req-7f9b967c-d92d-4f86-b7f2-0e8a69b93486 req-5837ca18-d85b-489a-84aa-cc6546e536fd service nova] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Updating instance_info_cache with network_info: [{"id": "de8de653-ec88-4a72-840c-27978f584581", "address": "fa:16:3e:49:29:8e", "network": {"id": "b71fbfa9-df50-40cb-95c3-272b6a724bc9", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-984806882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12b5d28eab2e49989d1e2f1a7e523eff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde8de653-ec", "ovs_interfaceid": "de8de653-ec88-4a72-840c-27978f584581", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1774.364064] env[62684]: DEBUG oslo_vmware.rw_handles [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5257eb7a-4d4b-29bd-186e-5ccf1f8bbf31/disk-0.vmdk. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1774.366371] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f39e32b9-4f36-452c-bbbc-f426e4bc5fc9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.376274] env[62684]: DEBUG oslo_vmware.rw_handles [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5257eb7a-4d4b-29bd-186e-5ccf1f8bbf31/disk-0.vmdk is in state: ready. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1774.376274] env[62684]: ERROR oslo_vmware.rw_handles [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5257eb7a-4d4b-29bd-186e-5ccf1f8bbf31/disk-0.vmdk due to incomplete transfer. 
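A note on the locking pattern visible above: the "Acquiring lock" / "acquired" / ""released"" triples are emitted by oslo.concurrency's lockutils, which serializes work such as the per-instance network info cache refresh behind a named lock (for example "refresh_cache-0dbd52ac-c987-4728-974e-73e99465c5e7"). A minimal illustrative sketch of that pattern, with a hypothetical refresh function and an assumed lock-name scheme rather than the actual Nova code path:

    from oslo_concurrency import lockutils

    def refresh_network_cache(instance_uuid, refresh_fn):
        # Assumed naming, mirroring the "refresh_cache-<uuid>" locks in the log.
        lock_name = 'refresh_cache-%s' % instance_uuid
        # lockutils.lock() is what produces the Acquiring/acquired/released
        # DEBUG lines; only one thread of this process holds the lock at a time.
        with lockutils.lock(lock_name):
            return refresh_fn(instance_uuid)

The decorator form, lockutils.synchronized(<name>), gives the same behaviour for whole functions and typically sits behind the coarser locks such as "compute_resources".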
[ 1774.376424] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c57fe42e-0292-45d9-8cb1-f7b52c020d2b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.387645] env[62684]: DEBUG oslo_vmware.rw_handles [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5257eb7a-4d4b-29bd-186e-5ccf1f8bbf31/disk-0.vmdk. {{(pid=62684) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1774.387645] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Uploaded image 500c73cc-1525-4d2d-8617-14b9836332de to the Glance image server {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1774.389464] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Destroying the VM {{(pid=62684) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1774.390029] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1ac75865-39c5-436a-9527-d39bcc26bd28 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.398916] env[62684]: DEBUG oslo_vmware.api [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Waiting for the task: (returnval){ [ 1774.398916] env[62684]: value = "task-2052465" [ 1774.398916] env[62684]: _type = "Task" [ 1774.398916] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.410218] env[62684]: DEBUG oslo_vmware.api [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052465, 'name': Destroy_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.457716] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052463, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.474022] env[62684]: DEBUG nova.scheduler.client.report [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1774.532567] env[62684]: DEBUG nova.network.neutron [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1774.714690] env[62684]: DEBUG oslo_vmware.api [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Task: {'id': task-2052464, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.719966] env[62684]: DEBUG nova.network.neutron [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Updating instance_info_cache with network_info: [{"id": "292689a2-e664-4a36-bbc0-9f7465f7d256", "address": "fa:16:3e:18:64:ad", "network": {"id": "09f3690c-22a5-4b7d-9746-648a3bea66a4", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-124665865-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98ce92c22eff45fdbd73acff31aca8dd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d39252e-42ef-4252-98d3-62af5a0d109d", "external-id": "nsx-vlan-transportzone-190", "segmentation_id": 190, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap292689a2-e6", "ovs_interfaceid": "292689a2-e664-4a36-bbc0-9f7465f7d256", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1774.819397] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Skipping network cache update for instance because it is being deleted. 
{{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9946}} [ 1774.819556] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Skipping network cache update for instance because it is Building. {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1774.819914] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Skipping network cache update for instance because it is Building. {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1774.820240] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "refresh_cache-0f9a525c-09b9-483e-b418-fea6e6e5dc4a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1774.820411] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "refresh_cache-0f9a525c-09b9-483e-b418-fea6e6e5dc4a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1774.820579] env[62684]: DEBUG nova.network.neutron [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Forcefully refreshing network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1774.820800] env[62684]: DEBUG nova.objects.instance [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lazy-loading 'info_cache' on Instance uuid 0f9a525c-09b9-483e-b418-fea6e6e5dc4a {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1774.849703] env[62684]: DEBUG oslo_concurrency.lockutils [req-7f9b967c-d92d-4f86-b7f2-0e8a69b93486 req-5837ca18-d85b-489a-84aa-cc6546e536fd service nova] Releasing lock "refresh_cache-0dbd52ac-c987-4728-974e-73e99465c5e7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1774.910436] env[62684]: DEBUG oslo_vmware.api [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052465, 'name': Destroy_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.960154] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052463, 'name': CreateVM_Task, 'duration_secs': 1.599493} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1774.960154] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1774.961661] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1774.962385] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1774.963184] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1774.963614] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a54f2e60-ad0a-4abb-b3b5-27a857ad0905 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.973019] env[62684]: DEBUG oslo_vmware.api [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Waiting for the task: (returnval){ [ 1774.973019] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52743691-612c-891e-3c42-7923030bd9ab" [ 1774.973019] env[62684]: _type = "Task" [ 1774.973019] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.982572] env[62684]: DEBUG oslo_concurrency.lockutils [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.180s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1774.985778] env[62684]: DEBUG oslo_vmware.api [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52743691-612c-891e-3c42-7923030bd9ab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.986616] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.928s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1774.988957] env[62684]: INFO nova.compute.claims [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1775.026760] env[62684]: INFO nova.scheduler.client.report [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Deleted allocations for instance 91869c00-edd0-40a8-84df-d8842d750558 [ 1775.219485] env[62684]: DEBUG oslo_vmware.api [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Task: {'id': task-2052464, 'name': PowerOffVM_Task, 'duration_secs': 1.094916} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.219485] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1775.219485] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1775.219485] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-95de0042-52d2-4229-b5a0-edf1238a4254 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.225172] env[62684]: DEBUG oslo_concurrency.lockutils [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Releasing lock "refresh_cache-a4767855-0c1d-48c8-98cc-6532ff140b5c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1775.225172] env[62684]: DEBUG nova.compute.manager [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Instance network_info: |[{"id": "292689a2-e664-4a36-bbc0-9f7465f7d256", "address": "fa:16:3e:18:64:ad", "network": {"id": "09f3690c-22a5-4b7d-9746-648a3bea66a4", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-124665865-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98ce92c22eff45fdbd73acff31aca8dd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d39252e-42ef-4252-98d3-62af5a0d109d", "external-id": "nsx-vlan-transportzone-190", "segmentation_id": 190, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap292689a2-e6", "ovs_interfaceid": "292689a2-e664-4a36-bbc0-9f7465f7d256", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1775.225328] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:18:64:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9d39252e-42ef-4252-98d3-62af5a0d109d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '292689a2-e664-4a36-bbc0-9f7465f7d256', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1775.234161] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Creating folder: Project (98ce92c22eff45fdbd73acff31aca8dd). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1775.234161] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-82f6eeb5-cef9-4173-87dc-9a37121ef72d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.248548] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Created folder: Project (98ce92c22eff45fdbd73acff31aca8dd) in parent group-v421118. [ 1775.248548] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Creating folder: Instances. Parent ref: group-v421183. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1775.248548] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cf2345a9-1c3b-4e97-8fb1-5468cafb95ac {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.263102] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Created folder: Instances in parent group-v421183. 
[ 1775.263102] env[62684]: DEBUG oslo.service.loopingcall [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1775.263102] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1775.263102] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1e270bfb-af4c-4d54-ad69-e52853f47e69 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.289239] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1775.289239] env[62684]: value = "task-2052469" [ 1775.289239] env[62684]: _type = "Task" [ 1775.289239] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.303921] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052469, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.305504] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1775.305726] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1775.305922] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Deleting the datastore file [datastore1] aec16a15-5d75-4ea6-800b-1bf67f762d89 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1775.306199] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3e5ef639-da12-4cd1-ad81-379766d7988d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.313551] env[62684]: DEBUG oslo_vmware.api [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Waiting for the task: (returnval){ [ 1775.313551] env[62684]: value = "task-2052470" [ 1775.313551] env[62684]: _type = "Task" [ 1775.313551] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.322703] env[62684]: DEBUG oslo_vmware.api [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Task: {'id': task-2052470, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.400818] env[62684]: DEBUG oslo_concurrency.lockutils [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "refresh_cache-0676806b-c1f0-4c1a-a12d-add2edf1588f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1775.400818] env[62684]: DEBUG oslo_concurrency.lockutils [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquired lock "refresh_cache-0676806b-c1f0-4c1a-a12d-add2edf1588f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1775.400957] env[62684]: DEBUG nova.network.neutron [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1775.414024] env[62684]: DEBUG oslo_vmware.api [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052465, 'name': Destroy_Task, 'duration_secs': 0.94045} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.414372] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Destroyed the VM [ 1775.415749] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Deleting Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1775.415749] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-8e165d38-f980-4f22-b62e-c0706789b2fa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.427220] env[62684]: DEBUG oslo_vmware.api [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Waiting for the task: (returnval){ [ 1775.427220] env[62684]: value = "task-2052471" [ 1775.427220] env[62684]: _type = "Task" [ 1775.427220] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.436153] env[62684]: DEBUG oslo_vmware.api [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052471, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.488535] env[62684]: DEBUG oslo_vmware.api [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52743691-612c-891e-3c42-7923030bd9ab, 'name': SearchDatastore_Task, 'duration_secs': 0.018673} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.489389] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1775.489389] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1775.490603] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1775.490603] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1775.490603] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1775.490603] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f42df447-ab5b-4822-9903-47f5358a9d4f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.501599] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1775.501822] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Folder [datastore2] 
devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1775.502632] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c347f827-2a2b-46e0-9737-8229d8c3f9ed {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.513758] env[62684]: DEBUG oslo_vmware.api [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Waiting for the task: (returnval){ [ 1775.513758] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52fa56b1-5f87-cc9e-6679-f391fccf9e04" [ 1775.513758] env[62684]: _type = "Task" [ 1775.513758] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.523585] env[62684]: DEBUG oslo_vmware.api [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52fa56b1-5f87-cc9e-6679-f391fccf9e04, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.535340] env[62684]: DEBUG oslo_concurrency.lockutils [None req-55b959b2-8e05-47b8-a053-7dc19cb969a3 tempest-ServerPasswordTestJSON-1779346823 tempest-ServerPasswordTestJSON-1779346823-project-member] Lock "91869c00-edd0-40a8-84df-d8842d750558" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.031s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1775.790816] env[62684]: DEBUG nova.compute.manager [req-e502c04f-3ef1-4f2b-bfbd-705dfa39a6ec req-a6ac783f-cfdd-4998-8dfa-3b0d00114f78 service nova] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Received event network-vif-plugged-292689a2-e664-4a36-bbc0-9f7465f7d256 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1775.790816] env[62684]: DEBUG oslo_concurrency.lockutils [req-e502c04f-3ef1-4f2b-bfbd-705dfa39a6ec req-a6ac783f-cfdd-4998-8dfa-3b0d00114f78 service nova] Acquiring lock "a4767855-0c1d-48c8-98cc-6532ff140b5c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1775.790929] env[62684]: DEBUG oslo_concurrency.lockutils [req-e502c04f-3ef1-4f2b-bfbd-705dfa39a6ec req-a6ac783f-cfdd-4998-8dfa-3b0d00114f78 service nova] Lock "a4767855-0c1d-48c8-98cc-6532ff140b5c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1775.791078] env[62684]: DEBUG oslo_concurrency.lockutils [req-e502c04f-3ef1-4f2b-bfbd-705dfa39a6ec req-a6ac783f-cfdd-4998-8dfa-3b0d00114f78 service nova] Lock "a4767855-0c1d-48c8-98cc-6532ff140b5c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1775.791269] env[62684]: DEBUG nova.compute.manager [req-e502c04f-3ef1-4f2b-bfbd-705dfa39a6ec 
req-a6ac783f-cfdd-4998-8dfa-3b0d00114f78 service nova] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] No waiting events found dispatching network-vif-plugged-292689a2-e664-4a36-bbc0-9f7465f7d256 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1775.791865] env[62684]: WARNING nova.compute.manager [req-e502c04f-3ef1-4f2b-bfbd-705dfa39a6ec req-a6ac783f-cfdd-4998-8dfa-3b0d00114f78 service nova] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Received unexpected event network-vif-plugged-292689a2-e664-4a36-bbc0-9f7465f7d256 for instance with vm_state building and task_state spawning. [ 1775.791865] env[62684]: DEBUG nova.compute.manager [req-e502c04f-3ef1-4f2b-bfbd-705dfa39a6ec req-a6ac783f-cfdd-4998-8dfa-3b0d00114f78 service nova] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Received event network-changed-292689a2-e664-4a36-bbc0-9f7465f7d256 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1775.791865] env[62684]: DEBUG nova.compute.manager [req-e502c04f-3ef1-4f2b-bfbd-705dfa39a6ec req-a6ac783f-cfdd-4998-8dfa-3b0d00114f78 service nova] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Refreshing instance network info cache due to event network-changed-292689a2-e664-4a36-bbc0-9f7465f7d256. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1775.792028] env[62684]: DEBUG oslo_concurrency.lockutils [req-e502c04f-3ef1-4f2b-bfbd-705dfa39a6ec req-a6ac783f-cfdd-4998-8dfa-3b0d00114f78 service nova] Acquiring lock "refresh_cache-a4767855-0c1d-48c8-98cc-6532ff140b5c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1775.792174] env[62684]: DEBUG oslo_concurrency.lockutils [req-e502c04f-3ef1-4f2b-bfbd-705dfa39a6ec req-a6ac783f-cfdd-4998-8dfa-3b0d00114f78 service nova] Acquired lock "refresh_cache-a4767855-0c1d-48c8-98cc-6532ff140b5c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1775.792441] env[62684]: DEBUG nova.network.neutron [req-e502c04f-3ef1-4f2b-bfbd-705dfa39a6ec req-a6ac783f-cfdd-4998-8dfa-3b0d00114f78 service nova] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Refreshing network info cache for port 292689a2-e664-4a36-bbc0-9f7465f7d256 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1775.803685] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052469, 'name': CreateVM_Task, 'duration_secs': 0.371163} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.804428] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1775.806520] env[62684]: DEBUG oslo_concurrency.lockutils [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1775.806520] env[62684]: DEBUG oslo_concurrency.lockutils [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1775.806520] env[62684]: DEBUG oslo_concurrency.lockutils [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1775.806520] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22a04d3a-5917-4f13-b874-a57a5f706269 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.813504] env[62684]: DEBUG oslo_vmware.api [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Waiting for the task: (returnval){ [ 1775.813504] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5294a9ee-6653-1044-4a39-3d3b00944473" [ 1775.813504] env[62684]: _type = "Task" [ 1775.813504] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.824822] env[62684]: DEBUG oslo_vmware.api [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5294a9ee-6653-1044-4a39-3d3b00944473, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.827781] env[62684]: DEBUG oslo_vmware.api [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Task: {'id': task-2052470, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.239471} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.827781] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1775.828034] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1775.828099] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1775.828271] env[62684]: INFO nova.compute.manager [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Took 2.15 seconds to destroy the instance on the hypervisor. [ 1775.828498] env[62684]: DEBUG oslo.service.loopingcall [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1775.829570] env[62684]: DEBUG nova.compute.manager [-] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1775.829570] env[62684]: DEBUG nova.network.neutron [-] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1775.859307] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "4a15d298-115f-4132-8be0-00e623fa21d8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1775.859555] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "4a15d298-115f-4132-8be0-00e623fa21d8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1775.935475] env[62684]: DEBUG oslo_vmware.api [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052471, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.024490] env[62684]: DEBUG oslo_vmware.api [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52fa56b1-5f87-cc9e-6679-f391fccf9e04, 'name': SearchDatastore_Task, 'duration_secs': 0.010536} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.025393] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96f0277f-7472-4d5c-9506-1cc578745ae7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.034873] env[62684]: DEBUG oslo_vmware.api [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Waiting for the task: (returnval){ [ 1776.034873] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]525022e3-4fc9-838f-a715-994053e8b8a4" [ 1776.034873] env[62684]: _type = "Task" [ 1776.034873] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.043652] env[62684]: DEBUG oslo_vmware.api [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]525022e3-4fc9-838f-a715-994053e8b8a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.308189] env[62684]: DEBUG nova.network.neutron [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Updating instance_info_cache with network_info: [{"id": "10023d3d-f0cd-49c9-984f-fb3f2af83e3b", "address": "fa:16:3e:2d:b8:1b", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.60", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10023d3d-f0", "ovs_interfaceid": "10023d3d-f0cd-49c9-984f-fb3f2af83e3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1776.330844] env[62684]: DEBUG oslo_vmware.api [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 
tempest-ImagesNegativeTestJSON-1463313401-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5294a9ee-6653-1044-4a39-3d3b00944473, 'name': SearchDatastore_Task, 'duration_secs': 0.014762} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.331179] env[62684]: DEBUG oslo_concurrency.lockutils [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1776.331416] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1776.331743] env[62684]: DEBUG oslo_concurrency.lockutils [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1776.441943] env[62684]: DEBUG oslo_vmware.api [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052471, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.536387] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d2a14e2-741e-4c5d-b276-ca16b171a7e8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.553562] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfbba471-c8f3-4ea5-9aad-b0dd2c398564 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.557754] env[62684]: DEBUG oslo_vmware.api [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]525022e3-4fc9-838f-a715-994053e8b8a4, 'name': SearchDatastore_Task, 'duration_secs': 0.012404} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.558286] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1776.558970] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 0dbd52ac-c987-4728-974e-73e99465c5e7/0dbd52ac-c987-4728-974e-73e99465c5e7.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1776.560013] env[62684]: DEBUG oslo_concurrency.lockutils [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1776.560013] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1776.560013] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a91d2aaa-1411-431d-9d84-0f78660efff5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.561479] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-07d8d81d-c993-4f61-892b-5a52134b81e5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.589083] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9672991-363a-4613-aeaa-9d96a4af403f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.599250] env[62684]: DEBUG oslo_vmware.api [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Waiting for the task: (returnval){ [ 1776.599250] env[62684]: value = "task-2052472" [ 1776.599250] env[62684]: _type = "Task" [ 1776.599250] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.599575] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1776.599753] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1776.605420] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a62bddfa-325a-4b58-99ac-2689ccb19c56 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.609480] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bf49944-467c-4af2-880e-a84d673e023d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.620610] env[62684]: DEBUG oslo_vmware.api [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Task: {'id': task-2052472, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.630985] env[62684]: DEBUG oslo_vmware.api [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Waiting for the task: (returnval){ [ 1776.630985] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ec0d16-d3de-c642-bfa7-57d7ca9c7104" [ 1776.630985] env[62684]: _type = "Task" [ 1776.630985] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.631572] env[62684]: DEBUG nova.compute.provider_tree [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1776.642840] env[62684]: DEBUG oslo_vmware.api [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ec0d16-d3de-c642-bfa7-57d7ca9c7104, 'name': SearchDatastore_Task, 'duration_secs': 0.015405} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.644286] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ff060d9-5d8a-4261-830d-c6e6fab3cc44 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.652980] env[62684]: DEBUG oslo_vmware.api [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Waiting for the task: (returnval){ [ 1776.652980] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5258025a-59ac-617a-a0a1-28f6022a7050" [ 1776.652980] env[62684]: _type = "Task" [ 1776.652980] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.661839] env[62684]: DEBUG oslo_vmware.api [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5258025a-59ac-617a-a0a1-28f6022a7050, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.755126] env[62684]: DEBUG nova.network.neutron [req-e502c04f-3ef1-4f2b-bfbd-705dfa39a6ec req-a6ac783f-cfdd-4998-8dfa-3b0d00114f78 service nova] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Updated VIF entry in instance network info cache for port 292689a2-e664-4a36-bbc0-9f7465f7d256. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1776.755579] env[62684]: DEBUG nova.network.neutron [req-e502c04f-3ef1-4f2b-bfbd-705dfa39a6ec req-a6ac783f-cfdd-4998-8dfa-3b0d00114f78 service nova] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Updating instance_info_cache with network_info: [{"id": "292689a2-e664-4a36-bbc0-9f7465f7d256", "address": "fa:16:3e:18:64:ad", "network": {"id": "09f3690c-22a5-4b7d-9746-648a3bea66a4", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-124665865-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98ce92c22eff45fdbd73acff31aca8dd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d39252e-42ef-4252-98d3-62af5a0d109d", "external-id": "nsx-vlan-transportzone-190", "segmentation_id": 190, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap292689a2-e6", "ovs_interfaceid": "292689a2-e664-4a36-bbc0-9f7465f7d256", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1776.810228] env[62684]: DEBUG oslo_concurrency.lockutils [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Releasing lock "refresh_cache-0676806b-c1f0-4c1a-a12d-add2edf1588f" {{(pid=62684) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1776.814783] env[62684]: DEBUG nova.network.neutron [-] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1776.905774] env[62684]: DEBUG nova.network.neutron [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Updating instance_info_cache with network_info: [{"id": "5868c4da-5351-4d35-8886-12ba976894db", "address": "fa:16:3e:26:ba:29", "network": {"id": "ad517579-bdcb-4ccc-8e16-74f3524aa5f6", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1045737469-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3a7760a18d54bc4b8b4fd291e127381", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5868c4da-53", "ovs_interfaceid": "5868c4da-5351-4d35-8886-12ba976894db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1776.938317] env[62684]: DEBUG oslo_vmware.api [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052471, 'name': RemoveSnapshot_Task, 'duration_secs': 1.219806} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.938689] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Deleted Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1776.939096] env[62684]: INFO nova.compute.manager [None req-ee521fae-c6b7-4313-a767-ecf544b69d64 tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Took 19.29 seconds to snapshot the instance on the hypervisor. [ 1777.111690] env[62684]: DEBUG oslo_vmware.api [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Task: {'id': task-2052472, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.138027] env[62684]: DEBUG nova.scheduler.client.report [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1777.167169] env[62684]: DEBUG oslo_vmware.api [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5258025a-59ac-617a-a0a1-28f6022a7050, 'name': SearchDatastore_Task, 'duration_secs': 0.010511} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.167486] env[62684]: DEBUG oslo_concurrency.lockutils [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1777.168157] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] a4767855-0c1d-48c8-98cc-6532ff140b5c/a4767855-0c1d-48c8-98cc-6532ff140b5c.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1777.168157] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1a967f7f-61bf-4a7a-9ee8-306094547dcd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.180497] env[62684]: DEBUG oslo_vmware.api [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Waiting for the task: (returnval){ [ 1777.180497] env[62684]: value = "task-2052473" [ 1777.180497] env[62684]: _type = "Task" [ 1777.180497] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.191855] env[62684]: DEBUG oslo_vmware.api [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Task: {'id': task-2052473, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.238822] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1777.239341] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1777.259710] env[62684]: DEBUG oslo_concurrency.lockutils [req-e502c04f-3ef1-4f2b-bfbd-705dfa39a6ec req-a6ac783f-cfdd-4998-8dfa-3b0d00114f78 service nova] Releasing lock "refresh_cache-a4767855-0c1d-48c8-98cc-6532ff140b5c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1777.320969] env[62684]: INFO nova.compute.manager [-] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Took 1.49 seconds to deallocate network for instance. [ 1777.355123] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b0eea8-eaad-4ca4-b108-47b74fd49516 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.378866] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa21214a-e2dd-4fe7-952f-95d5e1f4cb80 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.389169] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Updating instance '0676806b-c1f0-4c1a-a12d-add2edf1588f' progress to 83 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1777.409907] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "refresh_cache-0f9a525c-09b9-483e-b418-fea6e6e5dc4a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1777.409907] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Updated the network info_cache for instance {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1777.409907] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1777.409907] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1777.409907] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1777.615040] env[62684]: DEBUG oslo_vmware.api [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Task: {'id': task-2052472, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.696509} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.615040] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 0dbd52ac-c987-4728-974e-73e99465c5e7/0dbd52ac-c987-4728-974e-73e99465c5e7.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1777.615040] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1777.615040] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d30d553d-77bb-41b8-8725-753d286c7653 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.622477] env[62684]: DEBUG oslo_vmware.api [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Waiting for the task: (returnval){ [ 1777.622477] env[62684]: value = "task-2052474" [ 1777.622477] env[62684]: _type = "Task" [ 1777.622477] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.632080] env[62684]: DEBUG oslo_vmware.api [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Task: {'id': task-2052474, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.643167] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.656s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1777.643703] env[62684]: DEBUG nova.compute.manager [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1777.646466] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.479s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1777.647890] env[62684]: INFO nova.compute.claims [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1777.695611] env[62684]: DEBUG oslo_vmware.api [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Task: {'id': task-2052473, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.832205] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1777.898922] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1777.899329] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-67c4c7ca-ca17-419d-a6d7-604045fa4452 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.911407] env[62684]: DEBUG oslo_vmware.api [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 1777.911407] env[62684]: value = "task-2052475" [ 1777.911407] env[62684]: _type = "Task" [ 1777.911407] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.917904] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1777.926703] env[62684]: DEBUG oslo_vmware.api [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052475, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.033811] env[62684]: DEBUG nova.compute.manager [req-299fd43a-6284-45a8-b704-ef4fa999e7d3 req-1e4d5c9d-1723-4972-a6e6-6c9391a916a0 service nova] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Received event network-vif-deleted-d94a0e39-627d-4191-9011-76da63ed1d8f {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1778.133579] env[62684]: DEBUG oslo_vmware.api [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Task: {'id': task-2052474, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065624} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.133579] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1778.135431] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6577507-8fd5-498b-a813-9bdc729dc32d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.153646] env[62684]: DEBUG nova.compute.utils [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1778.165873] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Reconfiguring VM instance instance-00000013 to attach disk [datastore2] 0dbd52ac-c987-4728-974e-73e99465c5e7/0dbd52ac-c987-4728-974e-73e99465c5e7.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1778.166738] env[62684]: DEBUG nova.compute.manager [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1778.166948] env[62684]: DEBUG nova.network.neutron [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1778.171503] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86506823-85b9-4d14-b149-aacd8fe56b7c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.201424] env[62684]: DEBUG oslo_vmware.api [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Task: {'id': task-2052473, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.810241} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.203324] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] a4767855-0c1d-48c8-98cc-6532ff140b5c/a4767855-0c1d-48c8-98cc-6532ff140b5c.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1778.204087] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1778.204480] env[62684]: DEBUG oslo_vmware.api [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Waiting for the task: (returnval){ [ 1778.204480] env[62684]: value = "task-2052476" [ 1778.204480] env[62684]: _type = "Task" [ 1778.204480] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.204986] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c83a1cba-861d-4746-b470-e819bb1ec47d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.221317] env[62684]: DEBUG oslo_vmware.api [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Task: {'id': task-2052476, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.223432] env[62684]: DEBUG oslo_vmware.api [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Waiting for the task: (returnval){ [ 1778.223432] env[62684]: value = "task-2052477" [ 1778.223432] env[62684]: _type = "Task" [ 1778.223432] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.234955] env[62684]: DEBUG oslo_vmware.api [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Task: {'id': task-2052477, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.263793] env[62684]: DEBUG nova.policy [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8560ad8367b54805a74f4fa11f398633', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2a997a73719f4eb5b3e35640f6c9f57f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1778.427546] env[62684]: DEBUG oslo_vmware.api [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052475, 'name': PowerOnVM_Task, 'duration_secs': 0.436068} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.427846] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1778.428046] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-64946d15-bc57-44fb-a8a7-0397ce05f7b6 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Updating instance '0676806b-c1f0-4c1a-a12d-add2edf1588f' progress to 100 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1778.667272] env[62684]: DEBUG nova.compute.manager [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1778.717838] env[62684]: DEBUG oslo_vmware.api [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Task: {'id': task-2052476, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.738803] env[62684]: DEBUG oslo_vmware.api [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Task: {'id': task-2052477, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.108219} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.739106] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1778.740129] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-985e55d1-cbbd-4a46-9710-96f6ed402590 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.767950] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Reconfiguring VM instance instance-00000014 to attach disk [datastore2] a4767855-0c1d-48c8-98cc-6532ff140b5c/a4767855-0c1d-48c8-98cc-6532ff140b5c.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1778.771594] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-33d32717-d306-47e8-a22c-ba4beae79859 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.787739] env[62684]: DEBUG nova.network.neutron [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Successfully created port: f30c0c93-502e-48a6-b8f1-c44350487322 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1778.797015] env[62684]: DEBUG oslo_vmware.api [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Waiting for the task: (returnval){ [ 1778.797015] env[62684]: value = "task-2052478" [ 1778.797015] env[62684]: _type = "Task" [ 1778.797015] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.810347] env[62684]: DEBUG oslo_vmware.api [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Task: {'id': task-2052478, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.869109] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Acquiring lock "0f9a525c-09b9-483e-b418-fea6e6e5dc4a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1778.869955] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Lock "0f9a525c-09b9-483e-b418-fea6e6e5dc4a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1778.870762] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Acquiring lock "0f9a525c-09b9-483e-b418-fea6e6e5dc4a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1778.870762] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Lock "0f9a525c-09b9-483e-b418-fea6e6e5dc4a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1778.870868] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Lock "0f9a525c-09b9-483e-b418-fea6e6e5dc4a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1778.876376] env[62684]: INFO nova.compute.manager [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Terminating instance [ 1778.878789] env[62684]: DEBUG nova.compute.manager [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1778.879140] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1778.880177] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bfbd8c2-0838-41b6-82df-be3bd767f306 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.892959] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1778.894366] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9ae9ac60-d3c8-4a66-823e-fd93535d99e5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.903258] env[62684]: DEBUG oslo_vmware.api [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Waiting for the task: (returnval){ [ 1778.903258] env[62684]: value = "task-2052479" [ 1778.903258] env[62684]: _type = "Task" [ 1778.903258] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.914077] env[62684]: DEBUG oslo_vmware.api [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052479, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.221657] env[62684]: DEBUG oslo_vmware.api [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Task: {'id': task-2052476, 'name': ReconfigVM_Task, 'duration_secs': 0.817736} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.222208] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Reconfigured VM instance instance-00000013 to attach disk [datastore2] 0dbd52ac-c987-4728-974e-73e99465c5e7/0dbd52ac-c987-4728-974e-73e99465c5e7.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1779.223376] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-81ee4284-3828-4958-866a-4f263b04f233 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.235624] env[62684]: DEBUG oslo_vmware.api [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Waiting for the task: (returnval){ [ 1779.235624] env[62684]: value = "task-2052480" [ 1779.235624] env[62684]: _type = "Task" [ 1779.235624] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.247539] env[62684]: DEBUG oslo_vmware.api [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Task: {'id': task-2052480, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.299012] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdae8a7a-3795-4740-a55c-6d28f7891147 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.312787] env[62684]: DEBUG oslo_vmware.api [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Task: {'id': task-2052478, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.315834] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e432adc9-168e-4030-aa50-7552483161b1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.351547] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b811f69-9e01-4e50-bf61-a8baf8595a03 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.361141] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9768ad57-9a6f-4df5-b174-918efdc955cf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.377413] env[62684]: DEBUG nova.compute.provider_tree [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1779.418951] env[62684]: DEBUG oslo_vmware.api [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052479, 'name': PowerOffVM_Task, 'duration_secs': 0.240905} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.420015] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1779.420015] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1779.421468] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c5e95ffb-f5bb-46aa-8810-990c6534ce00 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.511024] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1779.511024] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1779.511024] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-c11d13a4-9562-4dbc-94ba-facca48d849b 
tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Deleting the datastore file [datastore1] 0f9a525c-09b9-483e-b418-fea6e6e5dc4a {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1779.511024] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6a5de252-13a4-4e18-940f-717555235610 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.520686] env[62684]: DEBUG oslo_vmware.api [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Waiting for the task: (returnval){ [ 1779.520686] env[62684]: value = "task-2052482" [ 1779.520686] env[62684]: _type = "Task" [ 1779.520686] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.529694] env[62684]: DEBUG oslo_vmware.api [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052482, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.682353] env[62684]: DEBUG nova.compute.manager [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1779.715179] env[62684]: DEBUG nova.virt.hardware [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1779.715480] env[62684]: DEBUG nova.virt.hardware [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1779.715642] env[62684]: DEBUG nova.virt.hardware [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 1779.715827] env[62684]: DEBUG nova.virt.hardware [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1779.715978] env[62684]: DEBUG nova.virt.hardware [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1779.716276] env[62684]: DEBUG nova.virt.hardware [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1779.716551] env[62684]: DEBUG nova.virt.hardware [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1779.716730] env[62684]: DEBUG nova.virt.hardware [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1779.716902] env[62684]: DEBUG nova.virt.hardware [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1779.717099] env[62684]: DEBUG nova.virt.hardware [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1779.717276] env[62684]: DEBUG nova.virt.hardware [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1779.718136] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa1bd41-8a0f-4295-882a-8c4718908687 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.728029] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81c777d4-ab38-48e0-92da-2c02e4253117 {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.753468] env[62684]: DEBUG oslo_vmware.api [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Task: {'id': task-2052480, 'name': Rename_Task, 'duration_secs': 0.279291} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.756784] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1779.756784] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-60f70469-c83c-4fb7-abae-7df120d0cf97 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.761664] env[62684]: DEBUG oslo_vmware.api [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Waiting for the task: (returnval){ [ 1779.761664] env[62684]: value = "task-2052483" [ 1779.761664] env[62684]: _type = "Task" [ 1779.761664] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.772060] env[62684]: DEBUG oslo_vmware.api [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Task: {'id': task-2052483, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.818439] env[62684]: DEBUG oslo_vmware.api [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Task: {'id': task-2052478, 'name': ReconfigVM_Task, 'duration_secs': 0.683229} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.818439] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Reconfigured VM instance instance-00000014 to attach disk [datastore2] a4767855-0c1d-48c8-98cc-6532ff140b5c/a4767855-0c1d-48c8-98cc-6532ff140b5c.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1779.818776] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b39996b2-0447-441b-b182-acf31d974aaa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.828369] env[62684]: DEBUG oslo_vmware.api [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Waiting for the task: (returnval){ [ 1779.828369] env[62684]: value = "task-2052484" [ 1779.828369] env[62684]: _type = "Task" [ 1779.828369] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.837683] env[62684]: DEBUG oslo_vmware.api [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Task: {'id': task-2052484, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.881955] env[62684]: DEBUG nova.scheduler.client.report [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1780.035026] env[62684]: DEBUG oslo_vmware.api [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Task: {'id': task-2052482, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170608} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.035026] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1780.035026] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1780.035026] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1780.035026] env[62684]: INFO nova.compute.manager [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1780.035489] env[62684]: DEBUG oslo.service.loopingcall [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1780.035489] env[62684]: DEBUG nova.compute.manager [-] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1780.035489] env[62684]: DEBUG nova.network.neutron [-] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1780.276323] env[62684]: DEBUG oslo_vmware.api [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Task: {'id': task-2052483, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.344104] env[62684]: DEBUG oslo_vmware.api [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Task: {'id': task-2052484, 'name': Rename_Task, 'duration_secs': 0.201476} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.344981] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1780.345306] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-41b081b9-cec3-49b7-aae6-c2a9f77b74b2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.355916] env[62684]: DEBUG oslo_vmware.api [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Waiting for the task: (returnval){ [ 1780.355916] env[62684]: value = "task-2052485" [ 1780.355916] env[62684]: _type = "Task" [ 1780.355916] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.370192] env[62684]: DEBUG oslo_vmware.api [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Task: {'id': task-2052485, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.391151] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.745s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1780.391730] env[62684]: DEBUG nova.compute.manager [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1780.394664] env[62684]: DEBUG oslo_concurrency.lockutils [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.375s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.396375] env[62684]: INFO nova.compute.claims [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1780.773648] env[62684]: DEBUG oslo_vmware.api [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Task: {'id': task-2052483, 'name': PowerOnVM_Task, 'duration_secs': 0.861077} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.775017] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1780.775318] env[62684]: INFO nova.compute.manager [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Took 11.03 seconds to spawn the instance on the hypervisor. 
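The PowerOnVM_Task sequence just above (Invoking VirtualMachine.PowerOnVM_Task, repeated "progress is N%" polls, then "completed successfully" with a duration) is oslo.vmware's standard invoke-and-wait pattern. The following is a minimal sketch of that pattern, not Nova's actual driver code; the vCenter host, credentials, retry/poll settings, and the "vm-12345" managed object reference are placeholders for illustration, not values taken from this log.

from oslo_vmware import api, vim_util

# Placeholder connection details; a real deployment reads these from config.
session = api.VMwareAPISession('vcenter.example.com', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

# Stand-in for a VirtualMachine managed object reference that would normally
# come from a property collector or SearchIndex.FindAllByUuid lookup.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Start the VM; vCenter returns a Task reference immediately.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# wait_for_task() polls Task.info on task_poll_interval (producing the
# "progress is N%" lines) and returns once the task succeeds, raising on error.
task_info = session.wait_for_task(task)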
[ 1780.775887] env[62684]: DEBUG nova.compute.manager [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1780.776384] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c464687-f2cf-4069-87f5-f3a2f7a36f64 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.781455] env[62684]: DEBUG nova.compute.manager [req-9797075e-d1c7-4abc-89d5-589adef76a75 req-0e47ed9f-3370-45a3-aacf-7955d35579d0 service nova] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Received event network-vif-deleted-5868c4da-5351-4d35-8886-12ba976894db {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1780.781657] env[62684]: INFO nova.compute.manager [req-9797075e-d1c7-4abc-89d5-589adef76a75 req-0e47ed9f-3370-45a3-aacf-7955d35579d0 service nova] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Neutron deleted interface 5868c4da-5351-4d35-8886-12ba976894db; detaching it from the instance and deleting it from the info cache [ 1780.781828] env[62684]: DEBUG nova.network.neutron [req-9797075e-d1c7-4abc-89d5-589adef76a75 req-0e47ed9f-3370-45a3-aacf-7955d35579d0 service nova] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1780.867350] env[62684]: DEBUG oslo_vmware.api [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Task: {'id': task-2052485, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.901318] env[62684]: DEBUG nova.compute.utils [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1780.904795] env[62684]: DEBUG nova.compute.manager [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1780.904942] env[62684]: DEBUG nova.network.neutron [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1781.012703] env[62684]: DEBUG nova.policy [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43dc74abd7c647c984aaf40d19ebd195', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd3ffe76851ed41d19f0b447f807a5f72', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1781.054013] env[62684]: DEBUG nova.network.neutron [-] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1781.077213] env[62684]: DEBUG nova.compute.manager [req-adff905f-1642-498d-9594-094d28a1f718 req-8c83ad73-4e36-4726-9ee3-6db9fbe49d89 service nova] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Received event network-vif-plugged-f30c0c93-502e-48a6-b8f1-c44350487322 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1781.077213] env[62684]: DEBUG oslo_concurrency.lockutils [req-adff905f-1642-498d-9594-094d28a1f718 req-8c83ad73-4e36-4726-9ee3-6db9fbe49d89 service nova] Acquiring lock "17d30180-9770-4329-a6d8-757a93514a96-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1781.077213] env[62684]: DEBUG oslo_concurrency.lockutils [req-adff905f-1642-498d-9594-094d28a1f718 req-8c83ad73-4e36-4726-9ee3-6db9fbe49d89 service nova] Lock "17d30180-9770-4329-a6d8-757a93514a96-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1781.080372] env[62684]: DEBUG oslo_concurrency.lockutils [req-adff905f-1642-498d-9594-094d28a1f718 req-8c83ad73-4e36-4726-9ee3-6db9fbe49d89 service nova] Lock "17d30180-9770-4329-a6d8-757a93514a96-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1781.080649] env[62684]: DEBUG nova.compute.manager [req-adff905f-1642-498d-9594-094d28a1f718 req-8c83ad73-4e36-4726-9ee3-6db9fbe49d89 service nova] [instance: 17d30180-9770-4329-a6d8-757a93514a96] No waiting events found dispatching network-vif-plugged-f30c0c93-502e-48a6-b8f1-c44350487322 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1781.080853] env[62684]: WARNING nova.compute.manager [req-adff905f-1642-498d-9594-094d28a1f718 req-8c83ad73-4e36-4726-9ee3-6db9fbe49d89 service nova] 
[instance: 17d30180-9770-4329-a6d8-757a93514a96] Received unexpected event network-vif-plugged-f30c0c93-502e-48a6-b8f1-c44350487322 for instance with vm_state building and task_state spawning. [ 1781.152267] env[62684]: DEBUG nova.network.neutron [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Successfully updated port: f30c0c93-502e-48a6-b8f1-c44350487322 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1781.288061] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ad250475-e79a-499b-9f60-f3f2b1536037 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.303559] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c880490b-eab2-4c6d-b257-a65fff4678d5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.314961] env[62684]: INFO nova.compute.manager [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Took 38.64 seconds to build instance. [ 1781.343408] env[62684]: DEBUG nova.compute.manager [req-9797075e-d1c7-4abc-89d5-589adef76a75 req-0e47ed9f-3370-45a3-aacf-7955d35579d0 service nova] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Detach interface failed, port_id=5868c4da-5351-4d35-8886-12ba976894db, reason: Instance 0f9a525c-09b9-483e-b418-fea6e6e5dc4a could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1781.368291] env[62684]: DEBUG oslo_vmware.api [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Task: {'id': task-2052485, 'name': PowerOnVM_Task, 'duration_secs': 0.752789} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.368533] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1781.368734] env[62684]: INFO nova.compute.manager [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Took 8.88 seconds to spawn the instance on the hypervisor. 
[ 1781.368964] env[62684]: DEBUG nova.compute.manager [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1781.369701] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd373a9-1d01-418a-9c4b-27a619c9fe64 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.410751] env[62684]: DEBUG nova.compute.manager [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1781.557397] env[62684]: INFO nova.compute.manager [-] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Took 1.52 seconds to deallocate network for instance. [ 1781.655081] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Acquiring lock "refresh_cache-17d30180-9770-4329-a6d8-757a93514a96" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1781.655237] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Acquired lock "refresh_cache-17d30180-9770-4329-a6d8-757a93514a96" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1781.655389] env[62684]: DEBUG nova.network.neutron [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1781.818873] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8308c525-b436-4fe4-8c20-90ba2a3e1ae6 tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Lock "0dbd52ac-c987-4728-974e-73e99465c5e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.222s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1781.893965] env[62684]: INFO nova.compute.manager [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Took 39.17 seconds to build instance. 
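The lock lines in this section come from oslo.concurrency's lockutils: the "Acquiring lock ... by ..." / "acquired ... waited Ns" / '"released" ... held Ns' triplets are logged from the "inner" wrapper (lockutils.py:402/407/421), i.e. the synchronized decorator, while the refresh_cache acquire/release pairs come from the lock() context manager (lockutils.py:310/313/331). A minimal sketch of both forms follows, with illustrative lock names borrowed from the log.

from oslo_concurrency import lockutils

# Decorator form: callers of claim_resources() serialize on an in-process
# lock named "compute_resources"; the wrapper logs the acquire/wait/held
# timings seen in this log.
@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    return instance_uuid  # work done while the lock is held

# Context-manager form for an ad-hoc critical section, e.g. guarding a
# per-instance network-info cache refresh.
with lockutils.lock('refresh_cache-17d30180-9770-4329-a6d8-757a93514a96'):
    pass  # refresh the cache while holding the lock

claim_resources('ab2c7cbe-6f46-4174-bffb-055a15f2d56b')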
[ 1781.919235] env[62684]: DEBUG nova.network.neutron [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Successfully created port: 19976c5d-9288-4b98-b988-e0f5d4e855e9 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1781.962935] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb6fbfd8-0eff-4c37-a30f-c37f18712e94 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.970103] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3421b093-a4a7-42db-b6f8-721ef3bcf465 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "0676806b-c1f0-4c1a-a12d-add2edf1588f" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1781.970798] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3421b093-a4a7-42db-b6f8-721ef3bcf465 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "0676806b-c1f0-4c1a-a12d-add2edf1588f" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1781.973016] env[62684]: DEBUG nova.compute.manager [None req-3421b093-a4a7-42db-b6f8-721ef3bcf465 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Going to confirm migration 1 {{(pid=62684) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1781.981150] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f6482e-5f5a-448b-b843-2b3688a2aa92 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.014529] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5cf3fb5-e9ef-4c09-abad-be8f7e42437d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.024797] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33c92cb5-058b-4d76-a117-fb9832a7fb8b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.042508] env[62684]: DEBUG nova.compute.provider_tree [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1782.067672] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.319944] 
env[62684]: DEBUG nova.network.neutron [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1782.323392] env[62684]: DEBUG nova.compute.manager [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1782.397733] env[62684]: DEBUG oslo_concurrency.lockutils [None req-19860c38-e691-4999-bb02-97fd07432912 tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Lock "a4767855-0c1d-48c8-98cc-6532ff140b5c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.874s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1782.428488] env[62684]: DEBUG nova.compute.manager [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1782.455585] env[62684]: DEBUG nova.virt.hardware [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1782.456060] env[62684]: DEBUG nova.virt.hardware [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1782.456356] env[62684]: DEBUG nova.virt.hardware [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1782.456654] env[62684]: DEBUG nova.virt.hardware [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Flavor 
pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1782.456907] env[62684]: DEBUG nova.virt.hardware [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1782.460018] env[62684]: DEBUG nova.virt.hardware [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1782.460018] env[62684]: DEBUG nova.virt.hardware [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1782.460018] env[62684]: DEBUG nova.virt.hardware [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1782.460018] env[62684]: DEBUG nova.virt.hardware [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1782.460018] env[62684]: DEBUG nova.virt.hardware [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1782.460224] env[62684]: DEBUG nova.virt.hardware [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1782.460224] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c089a8bd-2eb4-4db2-85e8-b6abe164ffb8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.468431] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dda1361-90c9-4d56-80dd-0e1386215c9e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.549023] env[62684]: DEBUG nova.scheduler.client.report [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1782.605536] env[62684]: DEBUG oslo_concurrency.lockutils [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Acquiring lock "a4767855-0c1d-48c8-98cc-6532ff140b5c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.605536] env[62684]: DEBUG oslo_concurrency.lockutils [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Lock "a4767855-0c1d-48c8-98cc-6532ff140b5c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1782.605536] env[62684]: DEBUG oslo_concurrency.lockutils [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Acquiring lock "a4767855-0c1d-48c8-98cc-6532ff140b5c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.605536] env[62684]: DEBUG oslo_concurrency.lockutils [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Lock "a4767855-0c1d-48c8-98cc-6532ff140b5c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1782.605734] env[62684]: DEBUG oslo_concurrency.lockutils [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Lock "a4767855-0c1d-48c8-98cc-6532ff140b5c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1782.606731] env[62684]: INFO nova.compute.manager [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Terminating instance [ 1782.608881] env[62684]: DEBUG nova.compute.manager [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1782.609290] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1782.610239] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59a46b98-8f6d-4592-92e7-71694d2ecf31 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.621223] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1782.621223] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9be66798-faeb-43d2-946b-48e94a1c4ae1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.628955] env[62684]: DEBUG oslo_vmware.api [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Waiting for the task: (returnval){ [ 1782.628955] env[62684]: value = "task-2052486" [ 1782.628955] env[62684]: _type = "Task" [ 1782.628955] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.640461] env[62684]: DEBUG oslo_vmware.api [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Task: {'id': task-2052486, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.697116] env[62684]: DEBUG nova.network.neutron [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Updating instance_info_cache with network_info: [{"id": "f30c0c93-502e-48a6-b8f1-c44350487322", "address": "fa:16:3e:e2:b9:d4", "network": {"id": "4dbf6e66-d1d2-4a75-95f1-83ee056bb69a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1488549197-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a997a73719f4eb5b3e35640f6c9f57f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d377d75-3add-4a15-8691-74b2eb010924", "external-id": "nsx-vlan-transportzone-71", "segmentation_id": 71, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf30c0c93-50", "ovs_interfaceid": "f30c0c93-502e-48a6-b8f1-c44350487322", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1782.704597] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3421b093-a4a7-42db-b6f8-721ef3bcf465 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "refresh_cache-0676806b-c1f0-4c1a-a12d-add2edf1588f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1782.704597] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3421b093-a4a7-42db-b6f8-721ef3bcf465 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquired lock "refresh_cache-0676806b-c1f0-4c1a-a12d-add2edf1588f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1782.704597] env[62684]: DEBUG nova.network.neutron [None req-3421b093-a4a7-42db-b6f8-721ef3bcf465 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1782.704597] env[62684]: DEBUG nova.objects.instance [None req-3421b093-a4a7-42db-b6f8-721ef3bcf465 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lazy-loading 'info_cache' on Instance uuid 0676806b-c1f0-4c1a-a12d-add2edf1588f {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1782.853969] env[62684]: DEBUG oslo_concurrency.lockutils [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.900281] env[62684]: DEBUG nova.compute.manager [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1783.053815] env[62684]: DEBUG oslo_concurrency.lockutils [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.657s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1783.054403] env[62684]: DEBUG nova.compute.manager [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1783.060362] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.980s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1783.060362] env[62684]: INFO nova.compute.claims [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1783.142569] env[62684]: DEBUG oslo_vmware.api [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Task: {'id': task-2052486, 'name': PowerOffVM_Task, 'duration_secs': 0.193695} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.143341] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1783.143534] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1783.143856] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f5d68b0d-475f-4653-be4c-b863f88f80e3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.200241] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Releasing lock "refresh_cache-17d30180-9770-4329-a6d8-757a93514a96" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1783.200241] env[62684]: DEBUG nova.compute.manager [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Instance network_info: |[{"id": "f30c0c93-502e-48a6-b8f1-c44350487322", "address": "fa:16:3e:e2:b9:d4", "network": {"id": "4dbf6e66-d1d2-4a75-95f1-83ee056bb69a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1488549197-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a997a73719f4eb5b3e35640f6c9f57f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d377d75-3add-4a15-8691-74b2eb010924", "external-id": "nsx-vlan-transportzone-71", "segmentation_id": 71, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf30c0c93-50", "ovs_interfaceid": "f30c0c93-502e-48a6-b8f1-c44350487322", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1783.200484] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:b9:d4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7d377d75-3add-4a15-8691-74b2eb010924', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f30c0c93-502e-48a6-b8f1-c44350487322', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1783.209912] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Creating folder: Project (2a997a73719f4eb5b3e35640f6c9f57f). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1783.212527] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-80238fb2-7e03-47b3-a7a3-c119381f5edd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.228063] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Created folder: Project (2a997a73719f4eb5b3e35640f6c9f57f) in parent group-v421118. [ 1783.228749] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Creating folder: Instances. Parent ref: group-v421186. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1783.229236] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d96ec071-1216-48e7-85ce-faa7d2456dfc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.232633] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1783.232982] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1783.233362] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Deleting the datastore file [datastore2] a4767855-0c1d-48c8-98cc-6532ff140b5c {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1783.234062] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2cffd575-a359-4295-ab7b-a9aaa82c85f7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.246688] env[62684]: DEBUG oslo_vmware.api [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Waiting for the task: (returnval){ [ 1783.246688] env[62684]: value = "task-2052490" [ 1783.246688] env[62684]: _type = 
"Task" [ 1783.246688] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.249726] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Created folder: Instances in parent group-v421186. [ 1783.250138] env[62684]: DEBUG oslo.service.loopingcall [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1783.255075] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1783.255352] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-530624ee-cb80-420a-81bc-d08a1204c29e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.286886] env[62684]: DEBUG oslo_vmware.api [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Task: {'id': task-2052490, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.288706] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1783.288706] env[62684]: value = "task-2052491" [ 1783.288706] env[62684]: _type = "Task" [ 1783.288706] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.305234] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052491, 'name': CreateVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.381389] env[62684]: DEBUG nova.compute.manager [req-0a811a88-75af-412c-9a56-81715b2edca4 req-073c653b-1e13-42b7-a992-c3d963dad29e service nova] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Received event network-changed-f30c0c93-502e-48a6-b8f1-c44350487322 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1783.381636] env[62684]: DEBUG nova.compute.manager [req-0a811a88-75af-412c-9a56-81715b2edca4 req-073c653b-1e13-42b7-a992-c3d963dad29e service nova] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Refreshing instance network info cache due to event network-changed-f30c0c93-502e-48a6-b8f1-c44350487322. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1783.381755] env[62684]: DEBUG oslo_concurrency.lockutils [req-0a811a88-75af-412c-9a56-81715b2edca4 req-073c653b-1e13-42b7-a992-c3d963dad29e service nova] Acquiring lock "refresh_cache-17d30180-9770-4329-a6d8-757a93514a96" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1783.381910] env[62684]: DEBUG oslo_concurrency.lockutils [req-0a811a88-75af-412c-9a56-81715b2edca4 req-073c653b-1e13-42b7-a992-c3d963dad29e service nova] Acquired lock "refresh_cache-17d30180-9770-4329-a6d8-757a93514a96" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1783.386678] env[62684]: DEBUG nova.network.neutron [req-0a811a88-75af-412c-9a56-81715b2edca4 req-073c653b-1e13-42b7-a992-c3d963dad29e service nova] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Refreshing network info cache for port f30c0c93-502e-48a6-b8f1-c44350487322 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1783.433491] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1783.561983] env[62684]: DEBUG nova.compute.utils [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1783.568118] env[62684]: DEBUG nova.compute.manager [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1783.568118] env[62684]: DEBUG nova.network.neutron [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1783.709270] env[62684]: DEBUG nova.compute.manager [req-7bf650d0-3983-4620-bb22-29c877ad0d78 req-696570d3-7b13-4864-9788-a7661ab597f0 service nova] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Received event network-changed-de8de653-ec88-4a72-840c-27978f584581 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1783.709524] env[62684]: DEBUG nova.compute.manager [req-7bf650d0-3983-4620-bb22-29c877ad0d78 req-696570d3-7b13-4864-9788-a7661ab597f0 service nova] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Refreshing instance network info cache due to event network-changed-de8de653-ec88-4a72-840c-27978f584581. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1783.709771] env[62684]: DEBUG oslo_concurrency.lockutils [req-7bf650d0-3983-4620-bb22-29c877ad0d78 req-696570d3-7b13-4864-9788-a7661ab597f0 service nova] Acquiring lock "refresh_cache-0dbd52ac-c987-4728-974e-73e99465c5e7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1783.710399] env[62684]: DEBUG oslo_concurrency.lockutils [req-7bf650d0-3983-4620-bb22-29c877ad0d78 req-696570d3-7b13-4864-9788-a7661ab597f0 service nova] Acquired lock "refresh_cache-0dbd52ac-c987-4728-974e-73e99465c5e7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1783.710399] env[62684]: DEBUG nova.network.neutron [req-7bf650d0-3983-4620-bb22-29c877ad0d78 req-696570d3-7b13-4864-9788-a7661ab597f0 service nova] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Refreshing network info cache for port de8de653-ec88-4a72-840c-27978f584581 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1783.728008] env[62684]: DEBUG nova.policy [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '890eb2142b224f419ea944dff141330d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '131e2a2e3f70470bbcca23e556d09e6f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1783.763038] env[62684]: DEBUG oslo_vmware.api [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Task: {'id': task-2052490, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.179456} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.763406] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1783.763678] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1783.763919] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1783.765432] env[62684]: INFO nova.compute.manager [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1783.765432] env[62684]: DEBUG oslo.service.loopingcall [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1783.765432] env[62684]: DEBUG nova.compute.manager [-] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1783.765432] env[62684]: DEBUG nova.network.neutron [-] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1783.806897] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052491, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.067358] env[62684]: DEBUG nova.compute.manager [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1784.310125] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052491, 'name': CreateVM_Task, 'duration_secs': 0.573051} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.310169] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1784.310874] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1784.311047] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1784.311389] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1784.311945] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-972c13e6-d22e-4a8b-8e71-de6edb216f7d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.317608] env[62684]: DEBUG oslo_vmware.api [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Waiting for the task: (returnval){ [ 1784.317608] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523eae59-3d93-0da7-7365-2a350245e2c2" [ 1784.317608] env[62684]: _type = "Task" [ 1784.317608] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.333390] env[62684]: DEBUG oslo_vmware.api [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523eae59-3d93-0da7-7365-2a350245e2c2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.701283] env[62684]: DEBUG nova.network.neutron [-] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1784.725198] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5208027-8890-4f62-96fb-cdefb780f0da {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.737131] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f354d4-376a-489b-b43b-7b649f749f5a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.775201] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cad3499-4937-4d60-9188-c9f6abc12f17 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.783038] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-811b37cf-8c7c-4a82-8172-bccdec5ce9f9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.807046] env[62684]: DEBUG nova.compute.provider_tree [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1784.833847] env[62684]: DEBUG oslo_vmware.api [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523eae59-3d93-0da7-7365-2a350245e2c2, 'name': SearchDatastore_Task, 'duration_secs': 0.029994} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.833847] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1784.833847] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1784.834050] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1784.834085] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1784.835051] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1784.835051] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee3277ca-f2e9-440f-8713-288c9be1cddf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.838468] env[62684]: DEBUG nova.network.neutron [None req-3421b093-a4a7-42db-b6f8-721ef3bcf465 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Updating instance_info_cache with network_info: [{"id": "10023d3d-f0cd-49c9-984f-fb3f2af83e3b", "address": "fa:16:3e:2d:b8:1b", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.60", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": 
"l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10023d3d-f0", "ovs_interfaceid": "10023d3d-f0cd-49c9-984f-fb3f2af83e3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1784.843842] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1784.844341] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1784.844780] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1060258-9045-4af6-83af-6808b738249f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.850121] env[62684]: DEBUG oslo_vmware.api [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Waiting for the task: (returnval){ [ 1784.850121] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52580368-e793-7cc7-b9ab-c722f94a6e81" [ 1784.850121] env[62684]: _type = "Task" [ 1784.850121] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.859879] env[62684]: DEBUG oslo_vmware.api [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52580368-e793-7cc7-b9ab-c722f94a6e81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.921291] env[62684]: DEBUG nova.network.neutron [req-7bf650d0-3983-4620-bb22-29c877ad0d78 req-696570d3-7b13-4864-9788-a7661ab597f0 service nova] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Updated VIF entry in instance network info cache for port de8de653-ec88-4a72-840c-27978f584581. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1784.921714] env[62684]: DEBUG nova.network.neutron [req-7bf650d0-3983-4620-bb22-29c877ad0d78 req-696570d3-7b13-4864-9788-a7661ab597f0 service nova] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Updating instance_info_cache with network_info: [{"id": "de8de653-ec88-4a72-840c-27978f584581", "address": "fa:16:3e:49:29:8e", "network": {"id": "b71fbfa9-df50-40cb-95c3-272b6a724bc9", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-984806882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12b5d28eab2e49989d1e2f1a7e523eff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde8de653-ec", "ovs_interfaceid": "de8de653-ec88-4a72-840c-27978f584581", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1785.079244] env[62684]: DEBUG nova.compute.manager [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1785.113247] env[62684]: DEBUG nova.virt.hardware [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1785.113247] env[62684]: DEBUG nova.virt.hardware [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1785.113247] env[62684]: DEBUG nova.virt.hardware [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1785.113382] env[62684]: DEBUG nova.virt.hardware [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1785.113382] env[62684]: DEBUG nova.virt.hardware [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1785.113382] env[62684]: DEBUG nova.virt.hardware [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1785.113382] env[62684]: DEBUG nova.virt.hardware [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1785.113382] env[62684]: DEBUG nova.virt.hardware [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1785.113568] env[62684]: DEBUG nova.virt.hardware [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1785.113568] env[62684]: DEBUG nova.virt.hardware [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1785.113568] env[62684]: DEBUG nova.virt.hardware [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1785.114095] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a85bde1f-01b0-4610-8eec-2ff26f35ee67 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.129045] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f49148c8-a80b-4c29-8b69-f79f91ab7a19 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.150043] env[62684]: DEBUG nova.network.neutron [req-0a811a88-75af-412c-9a56-81715b2edca4 req-073c653b-1e13-42b7-a992-c3d963dad29e service nova] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Updated VIF entry in instance network info cache for port f30c0c93-502e-48a6-b8f1-c44350487322. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1785.150133] env[62684]: DEBUG nova.network.neutron [req-0a811a88-75af-412c-9a56-81715b2edca4 req-073c653b-1e13-42b7-a992-c3d963dad29e service nova] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Updating instance_info_cache with network_info: [{"id": "f30c0c93-502e-48a6-b8f1-c44350487322", "address": "fa:16:3e:e2:b9:d4", "network": {"id": "4dbf6e66-d1d2-4a75-95f1-83ee056bb69a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1488549197-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a997a73719f4eb5b3e35640f6c9f57f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d377d75-3add-4a15-8691-74b2eb010924", "external-id": "nsx-vlan-transportzone-71", "segmentation_id": 71, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf30c0c93-50", "ovs_interfaceid": "f30c0c93-502e-48a6-b8f1-c44350487322", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1785.204533] env[62684]: INFO nova.compute.manager [-] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Took 1.44 seconds to deallocate network for instance. 
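The entries above all follow one vCenter task pattern: a vim method ending in _Task is invoked (PowerOffVM_Task, CreateVM_Task, DeleteDatastoreFile_Task, SearchDatastore_Task, CopyVirtualDisk_Task), the returned task reference is polled by _poll_task until it reports "completed successfully", and only then does the caller proceed. A minimal Python sketch of that flow against oslo.vmware's public session API, assuming an already-established oslo_vmware.api.VMwareAPISession named `session` and a VirtualMachine managed-object reference `vm_ref` (nova's vm_util/vmops wrappers seen in the log differ in naming but follow the same invoke-then-wait shape):

    # Illustrative sketch only: `session` is assumed to be an established
    # oslo_vmware.api.VMwareAPISession; `vm_ref` a VirtualMachine moref.
    def power_off_and_wait(session, vm_ref):
        # Invoking a *_Task vim method returns immediately with a task
        # managed-object reference (the "Invoking ... with opID=..." lines).
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task() polls the task (the "_poll_task ... progress is N%"
        # lines) and returns the final task info on success; a failed task
        # surfaces as an oslo_vmware exception rather than a return value.
        return session.wait_for_task(task)
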
[ 1785.312336] env[62684]: DEBUG nova.scheduler.client.report [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1785.340628] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3421b093-a4a7-42db-b6f8-721ef3bcf465 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Releasing lock "refresh_cache-0676806b-c1f0-4c1a-a12d-add2edf1588f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1785.340901] env[62684]: DEBUG nova.objects.instance [None req-3421b093-a4a7-42db-b6f8-721ef3bcf465 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lazy-loading 'migration_context' on Instance uuid 0676806b-c1f0-4c1a-a12d-add2edf1588f {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1785.352130] env[62684]: DEBUG nova.network.neutron [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Successfully created port: 6b84c070-8ca0-4da2-9936-7e97377d47ed {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1785.370106] env[62684]: DEBUG oslo_vmware.api [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52580368-e793-7cc7-b9ab-c722f94a6e81, 'name': SearchDatastore_Task, 'duration_secs': 0.008842} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1785.370106] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b40a0cf-d8d1-4aff-a2de-7495b0fe6f0d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.374974] env[62684]: DEBUG oslo_vmware.api [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Waiting for the task: (returnval){ [ 1785.374974] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520d481c-b780-3948-8bde-740a2e2526c1" [ 1785.374974] env[62684]: _type = "Task" [ 1785.374974] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1785.383409] env[62684]: DEBUG oslo_vmware.api [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520d481c-b780-3948-8bde-740a2e2526c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.430394] env[62684]: DEBUG oslo_concurrency.lockutils [req-7bf650d0-3983-4620-bb22-29c877ad0d78 req-696570d3-7b13-4864-9788-a7661ab597f0 service nova] Releasing lock "refresh_cache-0dbd52ac-c987-4728-974e-73e99465c5e7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1785.654332] env[62684]: DEBUG oslo_concurrency.lockutils [req-0a811a88-75af-412c-9a56-81715b2edca4 req-073c653b-1e13-42b7-a992-c3d963dad29e service nova] Releasing lock "refresh_cache-17d30180-9770-4329-a6d8-757a93514a96" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1785.711606] env[62684]: DEBUG oslo_concurrency.lockutils [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1785.777487] env[62684]: DEBUG nova.compute.manager [req-d9c1c126-5615-4dfa-872a-6c952d337c36 req-e9fe9e8c-a63c-4fa0-8865-e00013244a12 service nova] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Received event network-vif-deleted-292689a2-e664-4a36-bbc0-9f7465f7d256 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1785.816111] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.757s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1785.818930] env[62684]: DEBUG nova.compute.manager [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1785.819741] env[62684]: DEBUG oslo_concurrency.lockutils [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.514s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1785.819858] env[62684]: DEBUG nova.objects.instance [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Lazy-loading 'resources' on Instance uuid e1540aa6-12a4-4cff-a444-d47ee66c78d7 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1785.845466] env[62684]: DEBUG nova.objects.base [None req-3421b093-a4a7-42db-b6f8-721ef3bcf465 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Object Instance<0676806b-c1f0-4c1a-a12d-add2edf1588f> lazy-loaded attributes: info_cache,migration_context {{(pid=62684) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1785.846734] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cac6565-a476-49b0-941e-0ba655ad0ead {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.872169] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5324d610-3e47-4c9c-ac81-327f9fdf8751 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.883747] env[62684]: DEBUG oslo_vmware.api [None req-3421b093-a4a7-42db-b6f8-721ef3bcf465 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 1785.883747] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524538a7-feb4-8ef4-7fab-3a141b9f32ce" [ 1785.883747] env[62684]: _type = "Task" [ 1785.883747] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1785.892153] env[62684]: DEBUG oslo_vmware.api [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520d481c-b780-3948-8bde-740a2e2526c1, 'name': SearchDatastore_Task, 'duration_secs': 0.009861} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1785.899084] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1785.899877] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 17d30180-9770-4329-a6d8-757a93514a96/17d30180-9770-4329-a6d8-757a93514a96.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1785.900707] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ba04c6ed-cf4e-4756-9ad9-c55b3a9dcea3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.916244] env[62684]: DEBUG oslo_vmware.api [None req-3421b093-a4a7-42db-b6f8-721ef3bcf465 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524538a7-feb4-8ef4-7fab-3a141b9f32ce, 'name': SearchDatastore_Task, 'duration_secs': 0.007595} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1785.919403] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3421b093-a4a7-42db-b6f8-721ef3bcf465 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1785.920520] env[62684]: DEBUG oslo_vmware.api [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Waiting for the task: (returnval){ [ 1785.920520] env[62684]: value = "task-2052492" [ 1785.920520] env[62684]: _type = "Task" [ 1785.920520] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1785.934384] env[62684]: DEBUG oslo_vmware.api [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Task: {'id': task-2052492, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.046291] env[62684]: DEBUG nova.network.neutron [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Successfully updated port: 19976c5d-9288-4b98-b988-e0f5d4e855e9 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1786.328954] env[62684]: DEBUG nova.compute.utils [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1786.332317] env[62684]: DEBUG nova.compute.manager [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1786.332317] env[62684]: DEBUG nova.network.neutron [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1786.434794] env[62684]: DEBUG oslo_vmware.api [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Task: {'id': task-2052492, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482109} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1786.435593] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 17d30180-9770-4329-a6d8-757a93514a96/17d30180-9770-4329-a6d8-757a93514a96.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1786.435814] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1786.436290] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-70724ecf-12c7-43e2-bf68-dfa3492e3bd5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.443487] env[62684]: DEBUG oslo_vmware.api [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Waiting for the task: (returnval){ [ 1786.443487] env[62684]: value = "task-2052493" [ 1786.443487] env[62684]: _type = "Task" [ 1786.443487] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.455329] env[62684]: DEBUG oslo_vmware.api [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Task: {'id': task-2052493, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.510198] env[62684]: DEBUG nova.policy [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43dc74abd7c647c984aaf40d19ebd195', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd3ffe76851ed41d19f0b447f807a5f72', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1786.549393] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Acquiring lock "refresh_cache-ab2c7cbe-6f46-4174-bffb-055a15f2d56b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1786.551237] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Acquired lock "refresh_cache-ab2c7cbe-6f46-4174-bffb-055a15f2d56b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1786.551237] env[62684]: DEBUG nova.network.neutron [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1786.836469] env[62684]: DEBUG nova.compute.manager [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1786.873057] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d6e1694-4142-46a5-a12b-663756ecc6c8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.882012] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6e96d69-6193-4b64-9c9c-4aece2ebb703 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.920508] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7970cd7-2067-4031-b8a2-44c89ce939d0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.930036] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d70ff55-8d3a-42fc-814c-463e32863054 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.945868] env[62684]: DEBUG nova.compute.provider_tree [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1786.955966] env[62684]: DEBUG oslo_vmware.api [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Task: {'id': task-2052493, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060269} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1786.956743] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1786.957594] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8d7d5e6-0f3c-415e-9df3-d57a540f0279 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.983185] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] 17d30180-9770-4329-a6d8-757a93514a96/17d30180-9770-4329-a6d8-757a93514a96.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1786.984105] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db72e6b5-4eca-4d46-9995-d668c2150e5d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.004284] env[62684]: DEBUG oslo_vmware.api [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Waiting for the task: (returnval){ [ 1787.004284] env[62684]: value = "task-2052494" [ 1787.004284] env[62684]: _type = "Task" [ 1787.004284] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.012381] env[62684]: DEBUG oslo_vmware.api [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Task: {'id': task-2052494, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.091031] env[62684]: DEBUG nova.network.neutron [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1787.304094] env[62684]: DEBUG nova.network.neutron [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Successfully created port: aa71af47-855f-4fc6-9a8d-ca724cde3d12 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1787.455163] env[62684]: DEBUG nova.scheduler.client.report [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1787.522227] env[62684]: DEBUG oslo_vmware.api [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Task: {'id': task-2052494, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.553922] env[62684]: DEBUG nova.network.neutron [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Updating instance_info_cache with network_info: [{"id": "19976c5d-9288-4b98-b988-e0f5d4e855e9", "address": "fa:16:3e:93:76:3d", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.218", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19976c5d-92", "ovs_interfaceid": "19976c5d-9288-4b98-b988-e0f5d4e855e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1787.761059] env[62684]: DEBUG nova.network.neutron [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Successfully updated port: 6b84c070-8ca0-4da2-9936-7e97377d47ed {{(pid=62684) _update_port 
/opt/stack/nova/nova/network/neutron.py:586}} [ 1787.852237] env[62684]: DEBUG nova.compute.manager [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1787.886571] env[62684]: DEBUG nova.virt.hardware [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1787.886878] env[62684]: DEBUG nova.virt.hardware [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1787.886989] env[62684]: DEBUG nova.virt.hardware [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1787.887280] env[62684]: DEBUG nova.virt.hardware [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1787.887525] env[62684]: DEBUG nova.virt.hardware [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1787.887575] env[62684]: DEBUG nova.virt.hardware [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1787.887954] env[62684]: DEBUG nova.virt.hardware [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1787.888214] env[62684]: DEBUG nova.virt.hardware [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1787.888560] env[62684]: DEBUG nova.virt.hardware [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1787.888560] env[62684]: DEBUG nova.virt.hardware [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1787.889592] env[62684]: DEBUG nova.virt.hardware [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1787.889795] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-034c23a0-06fa-4f90-80fa-ab59a1a77b5f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.902218] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-166ef336-b47f-4d7f-923b-e045ba8d07ca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.962237] env[62684]: DEBUG oslo_concurrency.lockutils [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.143s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1787.965986] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquiring lock "cfe219da-adf9-44b9-9df3-752ccf72a68b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1787.966285] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "cfe219da-adf9-44b9-9df3-752ccf72a68b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1787.966739] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 
tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.114s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1787.968262] env[62684]: INFO nova.compute.claims [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1787.993222] env[62684]: INFO nova.scheduler.client.report [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Deleted allocations for instance e1540aa6-12a4-4cff-a444-d47ee66c78d7 [ 1788.017434] env[62684]: DEBUG oslo_vmware.api [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Task: {'id': task-2052494, 'name': ReconfigVM_Task, 'duration_secs': 0.691494} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.018381] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Reconfigured VM instance instance-00000015 to attach disk [datastore1] 17d30180-9770-4329-a6d8-757a93514a96/17d30180-9770-4329-a6d8-757a93514a96.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1788.019075] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-16577fd4-5d58-46b7-acba-b1816161700b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.026783] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquiring lock "c6dc5401-f59e-4c18-9553-1240e2f49bce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1788.026783] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "c6dc5401-f59e-4c18-9553-1240e2f49bce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1788.027575] env[62684]: DEBUG oslo_vmware.api [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Waiting for the task: (returnval){ [ 1788.027575] env[62684]: value = "task-2052495" [ 1788.027575] env[62684]: _type = "Task" [ 1788.027575] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.036319] env[62684]: DEBUG oslo_vmware.api [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Task: {'id': task-2052495, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.059207] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Releasing lock "refresh_cache-ab2c7cbe-6f46-4174-bffb-055a15f2d56b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1788.059717] env[62684]: DEBUG nova.compute.manager [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Instance network_info: |[{"id": "19976c5d-9288-4b98-b988-e0f5d4e855e9", "address": "fa:16:3e:93:76:3d", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.218", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19976c5d-92", "ovs_interfaceid": "19976c5d-9288-4b98-b988-e0f5d4e855e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1788.062016] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:76:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ded18042-834c-4792-b3e8-b1c377446432', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '19976c5d-9288-4b98-b988-e0f5d4e855e9', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1788.068287] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Creating folder: Project (d3ffe76851ed41d19f0b447f807a5f72). Parent ref: group-v421118. 
{{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1788.069450] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dd2fe0bc-35eb-47e0-8dff-5f2e4fd8336d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.084014] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Created folder: Project (d3ffe76851ed41d19f0b447f807a5f72) in parent group-v421118. [ 1788.085144] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Creating folder: Instances. Parent ref: group-v421189. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1788.085144] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5d9150f6-e11a-4c1e-b446-3746400a7d83 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.095810] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquiring lock "5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1788.096015] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1788.096525] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Created folder: Instances in parent group-v421189. [ 1788.096747] env[62684]: DEBUG oslo.service.loopingcall [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1788.096943] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1788.097166] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dfd2d7a5-6029-40e7-a60e-70709ba480ee {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.116766] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1788.116766] env[62684]: value = "task-2052498" [ 1788.116766] env[62684]: _type = "Task" [ 1788.116766] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.124532] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052498, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.267188] env[62684]: DEBUG oslo_concurrency.lockutils [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Acquiring lock "refresh_cache-ca22ca59-1b60-46f0-ae83-03ed4002fa0d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1788.267354] env[62684]: DEBUG oslo_concurrency.lockutils [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Acquired lock "refresh_cache-ca22ca59-1b60-46f0-ae83-03ed4002fa0d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1788.267490] env[62684]: DEBUG nova.network.neutron [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1788.501862] env[62684]: DEBUG oslo_concurrency.lockutils [None req-05195ea6-e22f-4c68-86fd-f0dbbab0d69c tempest-ServerDiagnosticsV248Test-298832208 tempest-ServerDiagnosticsV248Test-298832208-project-member] Lock "e1540aa6-12a4-4cff-a444-d47ee66c78d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.033s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1788.542284] env[62684]: DEBUG oslo_vmware.api [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Task: {'id': task-2052495, 'name': Rename_Task, 'duration_secs': 0.137964} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.543560] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1788.543560] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-94c139df-27f0-4ca9-837f-fe81612ecc3b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.551013] env[62684]: DEBUG oslo_vmware.api [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Waiting for the task: (returnval){ [ 1788.551013] env[62684]: value = "task-2052499" [ 1788.551013] env[62684]: _type = "Task" [ 1788.551013] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.558486] env[62684]: DEBUG oslo_vmware.api [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Task: {'id': task-2052499, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.609346] env[62684]: DEBUG nova.compute.manager [req-b5c7438e-57ec-4763-a23c-8ef49e9fdd06 req-ab35a96b-b0de-42c8-b989-97182969b2a8 service nova] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Received event network-vif-plugged-6b84c070-8ca0-4da2-9936-7e97377d47ed {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1788.609554] env[62684]: DEBUG oslo_concurrency.lockutils [req-b5c7438e-57ec-4763-a23c-8ef49e9fdd06 req-ab35a96b-b0de-42c8-b989-97182969b2a8 service nova] Acquiring lock "ca22ca59-1b60-46f0-ae83-03ed4002fa0d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1788.609789] env[62684]: DEBUG oslo_concurrency.lockutils [req-b5c7438e-57ec-4763-a23c-8ef49e9fdd06 req-ab35a96b-b0de-42c8-b989-97182969b2a8 service nova] Lock "ca22ca59-1b60-46f0-ae83-03ed4002fa0d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1788.609944] env[62684]: DEBUG oslo_concurrency.lockutils [req-b5c7438e-57ec-4763-a23c-8ef49e9fdd06 req-ab35a96b-b0de-42c8-b989-97182969b2a8 service nova] Lock "ca22ca59-1b60-46f0-ae83-03ed4002fa0d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1788.614128] env[62684]: DEBUG nova.compute.manager [req-b5c7438e-57ec-4763-a23c-8ef49e9fdd06 req-ab35a96b-b0de-42c8-b989-97182969b2a8 service nova] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] No waiting events found dispatching network-vif-plugged-6b84c070-8ca0-4da2-9936-7e97377d47ed {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1788.614128] env[62684]: WARNING nova.compute.manager [req-b5c7438e-57ec-4763-a23c-8ef49e9fdd06 req-ab35a96b-b0de-42c8-b989-97182969b2a8 service nova] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Received unexpected event network-vif-plugged-6b84c070-8ca0-4da2-9936-7e97377d47ed for instance with vm_state building and task_state spawning. [ 1788.627764] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052498, 'name': CreateVM_Task, 'duration_secs': 0.403978} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.627764] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1788.628264] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1788.628421] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1788.628732] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1788.628992] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3772d26e-64f5-4378-b825-738a13bff7cf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.633898] env[62684]: DEBUG oslo_vmware.api [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1788.633898] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5271b96c-c5f6-9484-1dd7-12e07b0564a4" [ 1788.633898] env[62684]: _type = "Task" [ 1788.633898] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.642808] env[62684]: DEBUG oslo_vmware.api [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5271b96c-c5f6-9484-1dd7-12e07b0564a4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.688227] env[62684]: DEBUG nova.compute.manager [req-2177620e-7b29-42ae-9c39-6844cc25abd6 req-97fabf97-5209-4176-ba4e-d82b40373523 service nova] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Received event network-vif-plugged-19976c5d-9288-4b98-b988-e0f5d4e855e9 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1788.688508] env[62684]: DEBUG oslo_concurrency.lockutils [req-2177620e-7b29-42ae-9c39-6844cc25abd6 req-97fabf97-5209-4176-ba4e-d82b40373523 service nova] Acquiring lock "ab2c7cbe-6f46-4174-bffb-055a15f2d56b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1788.688729] env[62684]: DEBUG oslo_concurrency.lockutils [req-2177620e-7b29-42ae-9c39-6844cc25abd6 req-97fabf97-5209-4176-ba4e-d82b40373523 service nova] Lock "ab2c7cbe-6f46-4174-bffb-055a15f2d56b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1788.688902] env[62684]: DEBUG oslo_concurrency.lockutils [req-2177620e-7b29-42ae-9c39-6844cc25abd6 req-97fabf97-5209-4176-ba4e-d82b40373523 service nova] Lock "ab2c7cbe-6f46-4174-bffb-055a15f2d56b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1788.689101] env[62684]: DEBUG nova.compute.manager [req-2177620e-7b29-42ae-9c39-6844cc25abd6 req-97fabf97-5209-4176-ba4e-d82b40373523 service nova] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] No waiting events found dispatching network-vif-plugged-19976c5d-9288-4b98-b988-e0f5d4e855e9 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1788.689278] env[62684]: WARNING nova.compute.manager [req-2177620e-7b29-42ae-9c39-6844cc25abd6 req-97fabf97-5209-4176-ba4e-d82b40373523 service nova] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Received unexpected event network-vif-plugged-19976c5d-9288-4b98-b988-e0f5d4e855e9 for instance with vm_state building and task_state spawning. [ 1788.689443] env[62684]: DEBUG nova.compute.manager [req-2177620e-7b29-42ae-9c39-6844cc25abd6 req-97fabf97-5209-4176-ba4e-d82b40373523 service nova] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Received event network-changed-19976c5d-9288-4b98-b988-e0f5d4e855e9 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1788.689604] env[62684]: DEBUG nova.compute.manager [req-2177620e-7b29-42ae-9c39-6844cc25abd6 req-97fabf97-5209-4176-ba4e-d82b40373523 service nova] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Refreshing instance network info cache due to event network-changed-19976c5d-9288-4b98-b988-e0f5d4e855e9. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1788.689872] env[62684]: DEBUG oslo_concurrency.lockutils [req-2177620e-7b29-42ae-9c39-6844cc25abd6 req-97fabf97-5209-4176-ba4e-d82b40373523 service nova] Acquiring lock "refresh_cache-ab2c7cbe-6f46-4174-bffb-055a15f2d56b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1788.689978] env[62684]: DEBUG oslo_concurrency.lockutils [req-2177620e-7b29-42ae-9c39-6844cc25abd6 req-97fabf97-5209-4176-ba4e-d82b40373523 service nova] Acquired lock "refresh_cache-ab2c7cbe-6f46-4174-bffb-055a15f2d56b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1788.690132] env[62684]: DEBUG nova.network.neutron [req-2177620e-7b29-42ae-9c39-6844cc25abd6 req-97fabf97-5209-4176-ba4e-d82b40373523 service nova] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Refreshing network info cache for port 19976c5d-9288-4b98-b988-e0f5d4e855e9 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1788.818824] env[62684]: DEBUG nova.network.neutron [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1789.021658] env[62684]: DEBUG nova.network.neutron [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Updating instance_info_cache with network_info: [{"id": "6b84c070-8ca0-4da2-9936-7e97377d47ed", "address": "fa:16:3e:74:ed:f3", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.241", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b84c070-8c", "ovs_interfaceid": "6b84c070-8ca0-4da2-9936-7e97377d47ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1789.069657] env[62684]: DEBUG oslo_vmware.api [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Task: {'id': task-2052499, 'name': PowerOnVM_Task} progress is 37%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.145229] env[62684]: DEBUG oslo_vmware.api [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5271b96c-c5f6-9484-1dd7-12e07b0564a4, 'name': SearchDatastore_Task, 'duration_secs': 0.01616} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.146171] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1789.146171] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1789.146171] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1789.146312] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1789.146514] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1789.146793] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-103f4d19-552f-4551-9dab-7dbafd79ca51 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.158686] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1789.159018] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1789.159867] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c07fd9b1-a1ee-437b-b86c-649a0d6a57a7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.168319] env[62684]: DEBUG oslo_vmware.api [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1789.168319] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d673f7-5b93-a0d1-45b4-67fddaf18f51" [ 1789.168319] env[62684]: _type = "Task" [ 1789.168319] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.181616] env[62684]: DEBUG oslo_vmware.api [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d673f7-5b93-a0d1-45b4-67fddaf18f51, 'name': SearchDatastore_Task, 'duration_secs': 0.008992} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.184819] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cfb13eaa-1854-4639-8277-6374c4327c97 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.191047] env[62684]: DEBUG oslo_vmware.api [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1789.191047] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c03bb6-0c31-6d62-b8f5-ddddc6b946ed" [ 1789.191047] env[62684]: _type = "Task" [ 1789.191047] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.206759] env[62684]: DEBUG oslo_vmware.api [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c03bb6-0c31-6d62-b8f5-ddddc6b946ed, 'name': SearchDatastore_Task, 'duration_secs': 0.009579} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.207470] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1789.207768] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] ab2c7cbe-6f46-4174-bffb-055a15f2d56b/ab2c7cbe-6f46-4174-bffb-055a15f2d56b.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1789.208533] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3985daf9-7ee9-4529-bee6-585843036055 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.219275] env[62684]: DEBUG oslo_vmware.api [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1789.219275] env[62684]: value = "task-2052500" [ 1789.219275] env[62684]: _type = "Task" [ 1789.219275] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.225800] env[62684]: DEBUG oslo_vmware.api [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052500, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.230978] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "025dfe36-1f14-4bda-84a0-d424364b745b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.231241] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "025dfe36-1f14-4bda-84a0-d424364b745b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1789.460136] env[62684]: DEBUG nova.network.neutron [req-2177620e-7b29-42ae-9c39-6844cc25abd6 req-97fabf97-5209-4176-ba4e-d82b40373523 service nova] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Updated VIF entry in instance network info cache for port 19976c5d-9288-4b98-b988-e0f5d4e855e9. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1789.461639] env[62684]: DEBUG nova.network.neutron [req-2177620e-7b29-42ae-9c39-6844cc25abd6 req-97fabf97-5209-4176-ba4e-d82b40373523 service nova] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Updating instance_info_cache with network_info: [{"id": "19976c5d-9288-4b98-b988-e0f5d4e855e9", "address": "fa:16:3e:93:76:3d", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.218", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19976c5d-92", "ovs_interfaceid": "19976c5d-9288-4b98-b988-e0f5d4e855e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1789.525270] env[62684]: DEBUG oslo_concurrency.lockutils [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Releasing lock "refresh_cache-ca22ca59-1b60-46f0-ae83-03ed4002fa0d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1789.525595] env[62684]: DEBUG nova.compute.manager [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Instance network_info: |[{"id": "6b84c070-8ca0-4da2-9936-7e97377d47ed", "address": "fa:16:3e:74:ed:f3", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.241", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b84c070-8c", "ovs_interfaceid": "6b84c070-8ca0-4da2-9936-7e97377d47ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1789.526043] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None 
req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:ed:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ded18042-834c-4792-b3e8-b1c377446432', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6b84c070-8ca0-4da2-9936-7e97377d47ed', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1789.536041] env[62684]: DEBUG oslo.service.loopingcall [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1789.536041] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1789.537565] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-81b10eff-9b3a-4bd6-84b2-100dc6874151 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.578209] env[62684]: DEBUG oslo_vmware.api [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Task: {'id': task-2052499, 'name': PowerOnVM_Task} progress is 82%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.583534] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1789.583534] env[62684]: value = "task-2052501" [ 1789.583534] env[62684]: _type = "Task" [ 1789.583534] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.595717] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052501, 'name': CreateVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.699114] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9896596e-1b3b-4d06-becd-e44baa74db0b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.709581] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8eb1793-7716-4ef7-8b67-5a4253b9d3a8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.746870] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b74c047-e9d0-43ae-bd02-552cbdc2c17f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.755996] env[62684]: DEBUG oslo_vmware.api [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052500, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.760072] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04f51734-968e-457d-b595-ade02169a037 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.776991] env[62684]: DEBUG nova.compute.provider_tree [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1789.964976] env[62684]: DEBUG oslo_concurrency.lockutils [req-2177620e-7b29-42ae-9c39-6844cc25abd6 req-97fabf97-5209-4176-ba4e-d82b40373523 service nova] Releasing lock "refresh_cache-ab2c7cbe-6f46-4174-bffb-055a15f2d56b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1790.077631] env[62684]: DEBUG oslo_vmware.api [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Task: {'id': task-2052499, 'name': PowerOnVM_Task, 'duration_secs': 1.119441} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.078022] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1790.079117] env[62684]: INFO nova.compute.manager [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Took 10.40 seconds to spawn the instance on the hypervisor. [ 1790.079344] env[62684]: DEBUG nova.compute.manager [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1790.082353] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65941909-e48d-48d3-b6c9-5f57ec1a6d5e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.100703] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052501, 'name': CreateVM_Task, 'duration_secs': 0.473495} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.101125] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1790.101783] env[62684]: DEBUG oslo_concurrency.lockutils [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1790.102024] env[62684]: DEBUG oslo_concurrency.lockutils [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1790.102358] env[62684]: DEBUG oslo_concurrency.lockutils [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1790.102606] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fff6677c-2f94-495a-8324-9d42b7edaf55 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.107896] env[62684]: DEBUG oslo_vmware.api [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Waiting for the task: (returnval){ [ 1790.107896] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c3c8c6-3744-df7f-f3f5-0c93f14c844c" [ 1790.107896] env[62684]: _type = "Task" [ 1790.107896] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.116196] env[62684]: DEBUG oslo_vmware.api [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c3c8c6-3744-df7f-f3f5-0c93f14c844c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.258878] env[62684]: DEBUG oslo_vmware.api [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052500, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.644109} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.259913] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] ab2c7cbe-6f46-4174-bffb-055a15f2d56b/ab2c7cbe-6f46-4174-bffb-055a15f2d56b.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1790.260202] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1790.260478] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-602c888c-8cb4-4ff8-aebe-953c5a2bfedd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.267581] env[62684]: DEBUG oslo_vmware.api [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1790.267581] env[62684]: value = "task-2052502" [ 1790.267581] env[62684]: _type = "Task" [ 1790.267581] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.276509] env[62684]: DEBUG oslo_vmware.api [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052502, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.280866] env[62684]: DEBUG nova.scheduler.client.report [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1790.308508] env[62684]: DEBUG nova.network.neutron [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Successfully updated port: aa71af47-855f-4fc6-9a8d-ca724cde3d12 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1790.614028] env[62684]: INFO nova.compute.manager [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Took 36.58 seconds to build instance. [ 1790.623891] env[62684]: DEBUG oslo_vmware.api [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c3c8c6-3744-df7f-f3f5-0c93f14c844c, 'name': SearchDatastore_Task, 'duration_secs': 0.023153} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.623891] env[62684]: DEBUG oslo_concurrency.lockutils [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1790.624047] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1790.625187] env[62684]: DEBUG oslo_concurrency.lockutils [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1790.625187] env[62684]: DEBUG oslo_concurrency.lockutils [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1790.625187] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1790.625187] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b33a000a-834c-4a6c-b922-a6572af341d6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.633850] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1790.633939] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1790.635322] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2bf36154-b87c-4c4c-8dfa-ee2eb84a74b0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.644709] env[62684]: DEBUG oslo_vmware.api [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Waiting for the task: (returnval){ [ 1790.644709] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d87585-e54b-4af1-40af-596eb60e287c" [ 1790.644709] env[62684]: _type = "Task" [ 1790.644709] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.656261] env[62684]: DEBUG oslo_vmware.api [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d87585-e54b-4af1-40af-596eb60e287c, 'name': SearchDatastore_Task, 'duration_secs': 0.00937} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.657058] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61e768b4-b574-4c5b-9680-de46e9955753 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.663577] env[62684]: DEBUG oslo_vmware.api [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Waiting for the task: (returnval){ [ 1790.663577] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c97c47-7f7c-0473-cee1-70ba0f668fff" [ 1790.663577] env[62684]: _type = "Task" [ 1790.663577] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.677166] env[62684]: DEBUG oslo_vmware.api [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c97c47-7f7c-0473-cee1-70ba0f668fff, 'name': SearchDatastore_Task, 'duration_secs': 0.010531} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.677472] env[62684]: DEBUG oslo_concurrency.lockutils [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1790.678033] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] ca22ca59-1b60-46f0-ae83-03ed4002fa0d/ca22ca59-1b60-46f0-ae83-03ed4002fa0d.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1790.678033] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c0d9b106-d5ea-4b91-b594-e226bd45af2d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.684760] env[62684]: DEBUG oslo_vmware.api [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Waiting for the task: (returnval){ [ 1790.684760] env[62684]: value = "task-2052503" [ 1790.684760] env[62684]: _type = "Task" [ 1790.684760] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.693275] env[62684]: DEBUG oslo_vmware.api [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052503, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.778532] env[62684]: DEBUG oslo_vmware.api [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052502, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065005} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.780040] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1790.781280] env[62684]: DEBUG nova.compute.manager [req-6acf7588-af8e-4e60-b4fa-ea2e649caf14 req-a0d491fe-e40e-4625-84cf-c6a6572e567a service nova] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Received event network-changed-6b84c070-8ca0-4da2-9936-7e97377d47ed {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1790.782759] env[62684]: DEBUG nova.compute.manager [req-6acf7588-af8e-4e60-b4fa-ea2e649caf14 req-a0d491fe-e40e-4625-84cf-c6a6572e567a service nova] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Refreshing instance network info cache due to event network-changed-6b84c070-8ca0-4da2-9936-7e97377d47ed. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1790.782759] env[62684]: DEBUG oslo_concurrency.lockutils [req-6acf7588-af8e-4e60-b4fa-ea2e649caf14 req-a0d491fe-e40e-4625-84cf-c6a6572e567a service nova] Acquiring lock "refresh_cache-ca22ca59-1b60-46f0-ae83-03ed4002fa0d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1790.782759] env[62684]: DEBUG oslo_concurrency.lockutils [req-6acf7588-af8e-4e60-b4fa-ea2e649caf14 req-a0d491fe-e40e-4625-84cf-c6a6572e567a service nova] Acquired lock "refresh_cache-ca22ca59-1b60-46f0-ae83-03ed4002fa0d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1790.782759] env[62684]: DEBUG nova.network.neutron [req-6acf7588-af8e-4e60-b4fa-ea2e649caf14 req-a0d491fe-e40e-4625-84cf-c6a6572e567a service nova] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Refreshing network info cache for port 6b84c070-8ca0-4da2-9936-7e97377d47ed {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1790.784018] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d52bedc8-fb53-486c-af67-25a73f3eeebb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.796178] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.829s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1790.796708] env[62684]: DEBUG nova.compute.manager [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1790.801230] env[62684]: DEBUG oslo_concurrency.lockutils [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.848s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1790.801230] env[62684]: DEBUG nova.objects.instance [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lazy-loading 'resources' on Instance uuid 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1790.816714] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Acquiring lock "refresh_cache-a9dfeb4d-a92e-41cf-9d2f-43086cc9e868" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1790.816855] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Acquired lock "refresh_cache-a9dfeb4d-a92e-41cf-9d2f-43086cc9e868" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1790.816998] env[62684]: DEBUG nova.network.neutron [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1790.828997] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] ab2c7cbe-6f46-4174-bffb-055a15f2d56b/ab2c7cbe-6f46-4174-bffb-055a15f2d56b.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1790.830078] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90f42fbd-9739-4f54-88d0-bea6c7b7774c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.853714] env[62684]: DEBUG oslo_vmware.api [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1790.853714] env[62684]: value = "task-2052504" [ 1790.853714] env[62684]: _type = "Task" [ 1790.853714] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.862795] env[62684]: DEBUG oslo_vmware.api [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052504, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.895174] env[62684]: DEBUG nova.compute.manager [req-194673ba-9220-4c51-83d8-9352500ac204 req-341e91e3-c438-458f-8c97-7c10e3560259 service nova] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Received event network-vif-plugged-aa71af47-855f-4fc6-9a8d-ca724cde3d12 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1790.895484] env[62684]: DEBUG oslo_concurrency.lockutils [req-194673ba-9220-4c51-83d8-9352500ac204 req-341e91e3-c438-458f-8c97-7c10e3560259 service nova] Acquiring lock "a9dfeb4d-a92e-41cf-9d2f-43086cc9e868-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1790.895811] env[62684]: DEBUG oslo_concurrency.lockutils [req-194673ba-9220-4c51-83d8-9352500ac204 req-341e91e3-c438-458f-8c97-7c10e3560259 service nova] Lock "a9dfeb4d-a92e-41cf-9d2f-43086cc9e868-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1790.895966] env[62684]: DEBUG oslo_concurrency.lockutils [req-194673ba-9220-4c51-83d8-9352500ac204 req-341e91e3-c438-458f-8c97-7c10e3560259 service nova] Lock "a9dfeb4d-a92e-41cf-9d2f-43086cc9e868-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1790.896246] env[62684]: DEBUG nova.compute.manager [req-194673ba-9220-4c51-83d8-9352500ac204 req-341e91e3-c438-458f-8c97-7c10e3560259 service nova] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] No waiting events found dispatching network-vif-plugged-aa71af47-855f-4fc6-9a8d-ca724cde3d12 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1790.896470] env[62684]: WARNING nova.compute.manager [req-194673ba-9220-4c51-83d8-9352500ac204 req-341e91e3-c438-458f-8c97-7c10e3560259 service nova] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Received unexpected event network-vif-plugged-aa71af47-855f-4fc6-9a8d-ca724cde3d12 for instance with vm_state building and task_state spawning. [ 1790.896674] env[62684]: DEBUG nova.compute.manager [req-194673ba-9220-4c51-83d8-9352500ac204 req-341e91e3-c438-458f-8c97-7c10e3560259 service nova] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Received event network-changed-aa71af47-855f-4fc6-9a8d-ca724cde3d12 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1790.896911] env[62684]: DEBUG nova.compute.manager [req-194673ba-9220-4c51-83d8-9352500ac204 req-341e91e3-c438-458f-8c97-7c10e3560259 service nova] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Refreshing instance network info cache due to event network-changed-aa71af47-855f-4fc6-9a8d-ca724cde3d12. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1790.897093] env[62684]: DEBUG oslo_concurrency.lockutils [req-194673ba-9220-4c51-83d8-9352500ac204 req-341e91e3-c438-458f-8c97-7c10e3560259 service nova] Acquiring lock "refresh_cache-a9dfeb4d-a92e-41cf-9d2f-43086cc9e868" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1791.120057] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3c2f9e6a-a7d8-4e87-825b-1f39dc6eb2ba tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Lock "17d30180-9770-4329-a6d8-757a93514a96" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.070s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1791.194806] env[62684]: DEBUG oslo_vmware.api [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052503, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.277170] env[62684]: DEBUG nova.network.neutron [req-6acf7588-af8e-4e60-b4fa-ea2e649caf14 req-a0d491fe-e40e-4625-84cf-c6a6572e567a service nova] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Updated VIF entry in instance network info cache for port 6b84c070-8ca0-4da2-9936-7e97377d47ed. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1791.277170] env[62684]: DEBUG nova.network.neutron [req-6acf7588-af8e-4e60-b4fa-ea2e649caf14 req-a0d491fe-e40e-4625-84cf-c6a6572e567a service nova] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Updating instance_info_cache with network_info: [{"id": "6b84c070-8ca0-4da2-9936-7e97377d47ed", "address": "fa:16:3e:74:ed:f3", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.241", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b84c070-8c", "ovs_interfaceid": "6b84c070-8ca0-4da2-9936-7e97377d47ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1791.303325] env[62684]: DEBUG nova.compute.utils [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1791.305165] env[62684]: DEBUG nova.compute.manager [None 
req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1791.305407] env[62684]: DEBUG nova.network.neutron [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1791.369558] env[62684]: DEBUG oslo_vmware.api [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052504, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.384478] env[62684]: DEBUG nova.network.neutron [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1791.392717] env[62684]: DEBUG nova.policy [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e957449ae9d24bdaba38b3db704d3d61', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5cb4900a999e467bafdfd1fb407a82f4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1791.623547] env[62684]: DEBUG nova.compute.manager [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1791.655286] env[62684]: DEBUG nova.network.neutron [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Updating instance_info_cache with network_info: [{"id": "aa71af47-855f-4fc6-9a8d-ca724cde3d12", "address": "fa:16:3e:c1:ef:b3", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.57", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa71af47-85", "ovs_interfaceid": "aa71af47-855f-4fc6-9a8d-ca724cde3d12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1791.699926] env[62684]: DEBUG oslo_vmware.api [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052503, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.537567} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.702719] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] ca22ca59-1b60-46f0-ae83-03ed4002fa0d/ca22ca59-1b60-46f0-ae83-03ed4002fa0d.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1791.702968] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1791.703430] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5aa3d7c6-0b44-4f7f-b71c-e7996fc3027e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.711565] env[62684]: DEBUG oslo_vmware.api [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Waiting for the task: (returnval){ [ 1791.711565] env[62684]: value = "task-2052505" [ 1791.711565] env[62684]: _type = "Task" [ 1791.711565] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.726164] env[62684]: DEBUG oslo_vmware.api [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052505, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.782092] env[62684]: DEBUG oslo_concurrency.lockutils [req-6acf7588-af8e-4e60-b4fa-ea2e649caf14 req-a0d491fe-e40e-4625-84cf-c6a6572e567a service nova] Releasing lock "refresh_cache-ca22ca59-1b60-46f0-ae83-03ed4002fa0d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1791.815734] env[62684]: DEBUG nova.compute.manager [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1791.867992] env[62684]: DEBUG oslo_vmware.api [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052504, 'name': ReconfigVM_Task, 'duration_secs': 0.80135} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.868755] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Reconfigured VM instance instance-00000016 to attach disk [datastore1] ab2c7cbe-6f46-4174-bffb-055a15f2d56b/ab2c7cbe-6f46-4174-bffb-055a15f2d56b.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1791.872828] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6d0e6b51-427a-43cf-8ccf-4e3d5b2f424c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.880992] env[62684]: DEBUG oslo_vmware.api [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1791.880992] env[62684]: value = "task-2052506" [ 1791.880992] env[62684]: _type = "Task" [ 1791.880992] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.895276] env[62684]: DEBUG oslo_vmware.api [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052506, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.928752] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Acquiring lock "b945f05d-ef1c-4469-9390-f7bbd4f435f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1791.929065] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Lock "b945f05d-ef1c-4469-9390-f7bbd4f435f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1791.948406] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e2845f-57d3-4436-aa95-c2a5d110d140 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.956180] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05dff065-91d8-4ff0-9770-78433188db1f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.986763] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47892159-ce88-4cd4-87a4-357437d61cbe {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1791.990488] env[62684]: DEBUG nova.network.neutron [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Successfully created port: 220c5589-f035-4097-8c0d-dfd565a9203a {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1791.997547] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d783f7-e2b0-416b-b967-617abd06fd88 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.011397] env[62684]: DEBUG nova.compute.provider_tree [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1792.146912] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1792.160957] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Releasing lock "refresh_cache-a9dfeb4d-a92e-41cf-9d2f-43086cc9e868" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1792.161294] env[62684]: DEBUG nova.compute.manager [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Instance network_info: |[{"id": "aa71af47-855f-4fc6-9a8d-ca724cde3d12", "address": "fa:16:3e:c1:ef:b3", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.57", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa71af47-85", "ovs_interfaceid": "aa71af47-855f-4fc6-9a8d-ca724cde3d12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1792.161598] env[62684]: DEBUG oslo_concurrency.lockutils [req-194673ba-9220-4c51-83d8-9352500ac204 req-341e91e3-c438-458f-8c97-7c10e3560259 service nova] Acquired lock 
"refresh_cache-a9dfeb4d-a92e-41cf-9d2f-43086cc9e868" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1792.161776] env[62684]: DEBUG nova.network.neutron [req-194673ba-9220-4c51-83d8-9352500ac204 req-341e91e3-c438-458f-8c97-7c10e3560259 service nova] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Refreshing network info cache for port aa71af47-855f-4fc6-9a8d-ca724cde3d12 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1792.166110] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:ef:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ded18042-834c-4792-b3e8-b1c377446432', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aa71af47-855f-4fc6-9a8d-ca724cde3d12', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1792.173664] env[62684]: DEBUG oslo.service.loopingcall [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1792.176937] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1792.177730] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f6dd6084-ed02-47fa-8d24-3208aa013347 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.201404] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1792.201404] env[62684]: value = "task-2052507" [ 1792.201404] env[62684]: _type = "Task" [ 1792.201404] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.211836] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052507, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.219971] env[62684]: DEBUG oslo_vmware.api [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052505, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059568} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.220430] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1792.221099] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-381f8173-62c6-4fb6-8201-032980a52f83 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.244965] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] ca22ca59-1b60-46f0-ae83-03ed4002fa0d/ca22ca59-1b60-46f0-ae83-03ed4002fa0d.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1792.248146] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5db85439-7dbb-421b-ae70-6e4562218860 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.269722] env[62684]: DEBUG oslo_vmware.api [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Waiting for the task: (returnval){ [ 1792.269722] env[62684]: value = "task-2052508" [ 1792.269722] env[62684]: _type = "Task" [ 1792.269722] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.279020] env[62684]: DEBUG oslo_vmware.api [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052508, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.398497] env[62684]: DEBUG oslo_vmware.api [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052506, 'name': Rename_Task, 'duration_secs': 0.367564} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.398925] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1792.399316] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-abc0d95f-9988-4c6f-8e2d-2eb8cf5f8e7f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.406990] env[62684]: DEBUG oslo_vmware.api [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1792.406990] env[62684]: value = "task-2052509" [ 1792.406990] env[62684]: _type = "Task" [ 1792.406990] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.415615] env[62684]: DEBUG oslo_vmware.api [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052509, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.519791] env[62684]: DEBUG nova.scheduler.client.report [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1792.567904] env[62684]: DEBUG nova.network.neutron [req-194673ba-9220-4c51-83d8-9352500ac204 req-341e91e3-c438-458f-8c97-7c10e3560259 service nova] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Updated VIF entry in instance network info cache for port aa71af47-855f-4fc6-9a8d-ca724cde3d12. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1792.568386] env[62684]: DEBUG nova.network.neutron [req-194673ba-9220-4c51-83d8-9352500ac204 req-341e91e3-c438-458f-8c97-7c10e3560259 service nova] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Updating instance_info_cache with network_info: [{"id": "aa71af47-855f-4fc6-9a8d-ca724cde3d12", "address": "fa:16:3e:c1:ef:b3", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.57", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa71af47-85", "ovs_interfaceid": "aa71af47-855f-4fc6-9a8d-ca724cde3d12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1792.712897] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052507, 'name': CreateVM_Task, 'duration_secs': 0.393418} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.712897] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1792.714031] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.714031] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1792.714031] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1792.715366] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33a1d66b-7240-4d2f-8805-827c707b83d0 {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.729845] env[62684]: DEBUG oslo_vmware.api [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1792.729845] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5263ffa9-2d2e-61c1-2cb9-c3741752c09c" [ 1792.729845] env[62684]: _type = "Task" [ 1792.729845] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.739187] env[62684]: DEBUG oslo_vmware.api [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5263ffa9-2d2e-61c1-2cb9-c3741752c09c, 'name': SearchDatastore_Task, 'duration_secs': 0.012833} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.739477] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1792.739711] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1792.739976] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.740102] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1792.740344] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1792.740550] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-26d032bd-5963-4d9a-97e1-805e4c99c807 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.748557] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a 
tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1792.748742] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1792.749507] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-546387d3-4199-4ba1-977e-94cea39ad9fc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.754518] env[62684]: DEBUG oslo_vmware.api [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1792.754518] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e6529c-9093-1198-ff8c-c7340af67045" [ 1792.754518] env[62684]: _type = "Task" [ 1792.754518] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.761879] env[62684]: DEBUG oslo_vmware.api [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e6529c-9093-1198-ff8c-c7340af67045, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.778338] env[62684]: DEBUG oslo_vmware.api [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052508, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.828430] env[62684]: DEBUG nova.compute.manager [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1792.862130] env[62684]: DEBUG nova.virt.hardware [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1792.862774] env[62684]: DEBUG nova.virt.hardware [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1792.862774] env[62684]: DEBUG nova.virt.hardware [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1792.862774] env[62684]: DEBUG nova.virt.hardware [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1792.862956] env[62684]: DEBUG nova.virt.hardware [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1792.863057] env[62684]: DEBUG nova.virt.hardware [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1792.866278] env[62684]: DEBUG nova.virt.hardware [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1792.866486] env[62684]: DEBUG nova.virt.hardware [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1792.866669] env[62684]: DEBUG 
nova.virt.hardware [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1792.866899] env[62684]: DEBUG nova.virt.hardware [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1792.867039] env[62684]: DEBUG nova.virt.hardware [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1792.868528] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f99eb5ee-c123-4c43-9661-3965d417a4c0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.885540] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adfda84b-5774-440d-b7cb-d180eb284bee {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.920529] env[62684]: DEBUG oslo_vmware.api [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052509, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.022880] env[62684]: DEBUG oslo_concurrency.lockutils [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.222s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1793.026287] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.610s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1793.026287] env[62684]: DEBUG nova.objects.instance [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Lazy-loading 'resources' on Instance uuid 43d28811-26e4-4016-9f82-98349d4a05b7 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1793.060417] env[62684]: INFO nova.scheduler.client.report [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Deleted allocations for instance 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb [ 1793.071279] env[62684]: DEBUG oslo_concurrency.lockutils [req-194673ba-9220-4c51-83d8-9352500ac204 req-341e91e3-c438-458f-8c97-7c10e3560259 service nova] Releasing lock "refresh_cache-a9dfeb4d-a92e-41cf-9d2f-43086cc9e868" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1793.267667] env[62684]: DEBUG oslo_vmware.api [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e6529c-9093-1198-ff8c-c7340af67045, 'name': SearchDatastore_Task, 'duration_secs': 0.00788} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.268236] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8240ebc2-75a9-43a1-aa4a-b0aff04909c1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.276915] env[62684]: DEBUG oslo_vmware.api [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1793.276915] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5255859f-6516-4f52-3a90-f182c37dba48" [ 1793.276915] env[62684]: _type = "Task" [ 1793.276915] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.280576] env[62684]: DEBUG oslo_vmware.api [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052508, 'name': ReconfigVM_Task, 'duration_secs': 0.800692} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.283707] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Reconfigured VM instance instance-00000017 to attach disk [datastore1] ca22ca59-1b60-46f0-ae83-03ed4002fa0d/ca22ca59-1b60-46f0-ae83-03ed4002fa0d.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1793.284409] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e89e13df-e8e6-4694-a995-5e1ba4c6eb3f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.290812] env[62684]: DEBUG oslo_vmware.api [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5255859f-6516-4f52-3a90-f182c37dba48, 'name': SearchDatastore_Task, 'duration_secs': 0.010073} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.292009] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1793.292285] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] a9dfeb4d-a92e-41cf-9d2f-43086cc9e868/a9dfeb4d-a92e-41cf-9d2f-43086cc9e868.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1793.292599] env[62684]: DEBUG oslo_vmware.api [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Waiting for the task: (returnval){ [ 1793.292599] env[62684]: value = "task-2052510" [ 1793.292599] env[62684]: _type = "Task" [ 1793.292599] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.292786] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1b2f6cf0-02f8-4dcc-9ea7-5191f6775059 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.305184] env[62684]: DEBUG oslo_vmware.api [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052510, 'name': Rename_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.306682] env[62684]: DEBUG oslo_vmware.api [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1793.306682] env[62684]: value = "task-2052511" [ 1793.306682] env[62684]: _type = "Task" [ 1793.306682] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.317502] env[62684]: DEBUG oslo_vmware.api [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052511, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.420372] env[62684]: DEBUG oslo_vmware.api [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052509, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.568773] env[62684]: DEBUG oslo_concurrency.lockutils [None req-610ebd80-6d9b-40af-9d21-7178b1ec7cf4 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "8d53d8c3-6db8-4ebe-a35f-0f64602fafcb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.876s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1793.808942] env[62684]: DEBUG oslo_vmware.api [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052510, 'name': Rename_Task, 'duration_secs': 0.15347} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.812081] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1793.814642] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4c6a0631-e88f-4fba-90be-e82aef6162ef {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.821569] env[62684]: DEBUG oslo_vmware.api [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052511, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469435} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.822783] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] a9dfeb4d-a92e-41cf-9d2f-43086cc9e868/a9dfeb4d-a92e-41cf-9d2f-43086cc9e868.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1793.823022] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1793.823330] env[62684]: DEBUG oslo_vmware.api [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Waiting for the task: (returnval){ [ 1793.823330] env[62684]: value = "task-2052512" [ 1793.823330] env[62684]: _type = "Task" [ 1793.823330] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.823522] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c5f21847-217d-4b45-8b77-06130b7f1d8a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.835405] env[62684]: DEBUG oslo_vmware.api [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052512, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.839052] env[62684]: DEBUG oslo_vmware.api [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1793.839052] env[62684]: value = "task-2052513" [ 1793.839052] env[62684]: _type = "Task" [ 1793.839052] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.844461] env[62684]: DEBUG oslo_vmware.api [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052513, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.926022] env[62684]: DEBUG oslo_vmware.api [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052509, 'name': PowerOnVM_Task, 'duration_secs': 1.352112} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.926022] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1793.926022] env[62684]: INFO nova.compute.manager [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Took 11.50 seconds to spawn the instance on the hypervisor. [ 1793.926022] env[62684]: DEBUG nova.compute.manager [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1793.926022] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26ae2cf3-b9ac-4621-af35-8d03838665fd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.106782] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ed767e5-c9d8-4ba9-ba09-768e00a0b796 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.114590] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7321a47b-1dfc-4e70-a975-1daea8be8360 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.146949] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-026ed3bd-d3d9-4b47-8b08-728a8a8e757d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.155519] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba21db55-6f52-49d3-ab06-055575c284fb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.171127] env[62684]: DEBUG nova.compute.provider_tree [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1794.335690] env[62684]: DEBUG oslo_vmware.api [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052512, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.345495] env[62684]: DEBUG oslo_vmware.api [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052513, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10978} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.346402] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1794.346697] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7326c461-08b8-4423-9662-115ea882263f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.369451] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Reconfiguring VM instance instance-00000018 to attach disk [datastore2] a9dfeb4d-a92e-41cf-9d2f-43086cc9e868/a9dfeb4d-a92e-41cf-9d2f-43086cc9e868.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1794.369742] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b22ff90-a8b9-4715-82a4-8b1950457c99 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.388888] env[62684]: DEBUG oslo_vmware.api [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1794.388888] env[62684]: value = "task-2052514" [ 1794.388888] env[62684]: _type = "Task" [ 1794.388888] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.396438] env[62684]: DEBUG oslo_vmware.api [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052514, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.452384] env[62684]: INFO nova.compute.manager [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Took 37.32 seconds to build instance. 
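The records above follow oslo.vmware's task-polling pattern: the driver submits a vCenter task (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, PowerOnVM_Task), then waits on it, logging "progress is N%" each poll until "completed successfully" or an error. A minimal sketch of that loop follows; it is not the oslo.vmware implementation, and fetch_task_info is a hypothetical callable standing in for the PropertyCollector round-trip performed on each poll interval.

import time

def wait_for_task(fetch_task_info, poll_interval=0.5):
    """Poll a vCenter-style task until it succeeds or fails."""
    while True:
        # hypothetical: returns a dict like {"name": ..., "state": ..., "progress": ...}
        info = fetch_task_info()
        if info["state"] == "running":
            print(f"Task {info['name']} progress is {info.get('progress', 0)}%")
        elif info["state"] == "success":
            print(f"Task {info['name']} completed successfully")
            return info.get("result")
        else:  # "error"
            raise RuntimeError(f"Task {info['name']} failed: {info.get('error')}")
        time.sleep(poll_interval)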
[ 1794.539261] env[62684]: DEBUG nova.network.neutron [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Successfully updated port: 220c5589-f035-4097-8c0d-dfd565a9203a {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1794.674349] env[62684]: DEBUG nova.scheduler.client.report [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1794.835425] env[62684]: DEBUG oslo_vmware.api [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052512, 'name': PowerOnVM_Task, 'duration_secs': 0.953587} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.835651] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1794.836109] env[62684]: INFO nova.compute.manager [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Took 9.76 seconds to spawn the instance on the hypervisor. [ 1794.836109] env[62684]: DEBUG nova.compute.manager [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1794.837138] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74dffd46-03e7-4536-a457-dd232efc50ba {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.906134] env[62684]: DEBUG oslo_vmware.api [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052514, 'name': ReconfigVM_Task, 'duration_secs': 0.264244} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.907429] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Reconfigured VM instance instance-00000018 to attach disk [datastore2] a9dfeb4d-a92e-41cf-9d2f-43086cc9e868/a9dfeb4d-a92e-41cf-9d2f-43086cc9e868.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1794.908138] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-56d24188-1b96-43c2-b9ba-27ef67074456 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.915924] env[62684]: DEBUG oslo_vmware.api [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1794.915924] env[62684]: value = "task-2052515" [ 1794.915924] env[62684]: _type = "Task" [ 1794.915924] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.925833] env[62684]: DEBUG oslo_vmware.api [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052515, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.954431] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb156534-4e5b-4983-a6f9-2eb10658fed6 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Lock "ab2c7cbe-6f46-4174-bffb-055a15f2d56b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.944s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.017973] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Acquiring lock "50bc9674-d19c-40f1-a89f-1738a1e48307" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.018426] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Lock "50bc9674-d19c-40f1-a89f-1738a1e48307" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.041159] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "refresh_cache-4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" {{(pid=62684) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1795.041337] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "refresh_cache-4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1795.041484] env[62684]: DEBUG nova.network.neutron [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1795.180089] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.154s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.182504] env[62684]: DEBUG oslo_concurrency.lockutils [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.144s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.184133] env[62684]: INFO nova.compute.claims [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1795.213354] env[62684]: INFO nova.scheduler.client.report [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Deleted allocations for instance 43d28811-26e4-4016-9f82-98349d4a05b7 [ 1795.266183] env[62684]: DEBUG nova.compute.manager [req-c34efebe-df61-4dfc-b30c-35897328a012 req-e7a66466-fd70-4ba2-b75e-efca78d84c61 service nova] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Received event network-vif-plugged-220c5589-f035-4097-8c0d-dfd565a9203a {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1795.266183] env[62684]: DEBUG oslo_concurrency.lockutils [req-c34efebe-df61-4dfc-b30c-35897328a012 req-e7a66466-fd70-4ba2-b75e-efca78d84c61 service nova] Acquiring lock "4e5152b0-7bac-4dc2-b6c7-6590fa2d5978-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.266183] env[62684]: DEBUG oslo_concurrency.lockutils [req-c34efebe-df61-4dfc-b30c-35897328a012 req-e7a66466-fd70-4ba2-b75e-efca78d84c61 service nova] Lock "4e5152b0-7bac-4dc2-b6c7-6590fa2d5978-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.266183] env[62684]: DEBUG oslo_concurrency.lockutils 
[req-c34efebe-df61-4dfc-b30c-35897328a012 req-e7a66466-fd70-4ba2-b75e-efca78d84c61 service nova] Lock "4e5152b0-7bac-4dc2-b6c7-6590fa2d5978-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.266183] env[62684]: DEBUG nova.compute.manager [req-c34efebe-df61-4dfc-b30c-35897328a012 req-e7a66466-fd70-4ba2-b75e-efca78d84c61 service nova] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] No waiting events found dispatching network-vif-plugged-220c5589-f035-4097-8c0d-dfd565a9203a {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1795.266777] env[62684]: WARNING nova.compute.manager [req-c34efebe-df61-4dfc-b30c-35897328a012 req-e7a66466-fd70-4ba2-b75e-efca78d84c61 service nova] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Received unexpected event network-vif-plugged-220c5589-f035-4097-8c0d-dfd565a9203a for instance with vm_state building and task_state spawning. [ 1795.353232] env[62684]: INFO nova.compute.manager [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Took 37.35 seconds to build instance. [ 1795.429148] env[62684]: DEBUG oslo_vmware.api [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052515, 'name': Rename_Task, 'duration_secs': 0.148059} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.429148] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1795.429148] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9960dc98-7530-4423-a357-43559d2dccd6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.436382] env[62684]: DEBUG oslo_vmware.api [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1795.436382] env[62684]: value = "task-2052516" [ 1795.436382] env[62684]: _type = "Task" [ 1795.436382] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.442442] env[62684]: DEBUG oslo_vmware.api [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052516, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.459680] env[62684]: DEBUG nova.compute.manager [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1795.604282] env[62684]: DEBUG nova.network.neutron [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1795.722732] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5a438e95-4d08-4613-9f63-89e83c1490db tempest-AttachInterfacesV270Test-522583453 tempest-AttachInterfacesV270Test-522583453-project-member] Lock "43d28811-26e4-4016-9f82-98349d4a05b7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.763s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.855538] env[62684]: DEBUG oslo_concurrency.lockutils [None req-861bcd50-a4b9-4907-abb8-8241bf69e089 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Lock "ca22ca59-1b60-46f0-ae83-03ed4002fa0d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.086s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.944051] env[62684]: DEBUG oslo_vmware.api [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052516, 'name': PowerOnVM_Task, 'duration_secs': 0.440814} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.944332] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1795.944539] env[62684]: INFO nova.compute.manager [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Took 8.09 seconds to spawn the instance on the hypervisor. 
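The "acquired ... waited" and "released ... held" lines above are emitted by oslo.concurrency's lockutils wrapper, which times how long each caller waited for and then held a named lock such as "compute_resources" or "refresh_cache-<uuid>". A sketch of that usage pattern, assuming oslo.concurrency is installed; the function bodies are illustrative placeholders, not Nova's ResourceTracker or network-cache code.

from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def update_usage(instance_uuid):
    # Serialized with every other claim/usage update on this host; lockutils
    # logs how long the caller waited for and then held the lock.
    print(f"updating resource usage for {instance_uuid}")

def refresh_network_cache(instance_uuid):
    # Equivalent context-manager form, as used for the refresh_cache-<uuid> lock.
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        print(f"rebuilding network info cache for {instance_uuid}")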
[ 1795.944718] env[62684]: DEBUG nova.compute.manager [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1795.945738] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c95ccd15-a35d-4f6e-8656-3d09e9e3f448 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.980992] env[62684]: DEBUG oslo_concurrency.lockutils [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1796.011036] env[62684]: DEBUG nova.network.neutron [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Updating instance_info_cache with network_info: [{"id": "220c5589-f035-4097-8c0d-dfd565a9203a", "address": "fa:16:3e:3e:7c:34", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap220c5589-f0", "ovs_interfaceid": "220c5589-f035-4097-8c0d-dfd565a9203a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1796.125617] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4848e32-ac84-4b50-af37-e63fbddaf7bf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.132605] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-befbfa04-e100-450f-8304-b3734fea6600 tempest-ServersAdminNegativeTestJSON-2140285995 tempest-ServersAdminNegativeTestJSON-2140285995-project-admin] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Suspending the VM {{(pid=62684) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1796.132868] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-73d71a8e-ae6f-44cb-a8f4-6c6a0b35c3ac {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.139518] env[62684]: DEBUG oslo_vmware.api [None 
req-befbfa04-e100-450f-8304-b3734fea6600 tempest-ServersAdminNegativeTestJSON-2140285995 tempest-ServersAdminNegativeTestJSON-2140285995-project-admin] Waiting for the task: (returnval){ [ 1796.139518] env[62684]: value = "task-2052517" [ 1796.139518] env[62684]: _type = "Task" [ 1796.139518] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.149458] env[62684]: DEBUG oslo_vmware.api [None req-befbfa04-e100-450f-8304-b3734fea6600 tempest-ServersAdminNegativeTestJSON-2140285995 tempest-ServersAdminNegativeTestJSON-2140285995-project-admin] Task: {'id': task-2052517, 'name': SuspendVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.361232] env[62684]: DEBUG nova.compute.manager [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1796.465439] env[62684]: INFO nova.compute.manager [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Took 35.41 seconds to build instance. [ 1796.513303] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "refresh_cache-4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1796.513630] env[62684]: DEBUG nova.compute.manager [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Instance network_info: |[{"id": "220c5589-f035-4097-8c0d-dfd565a9203a", "address": "fa:16:3e:3e:7c:34", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap220c5589-f0", "ovs_interfaceid": "220c5589-f035-4097-8c0d-dfd565a9203a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1796.514066] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 
tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:7c:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f880ac2e-d532-4f54-87bb-998a8d1bca78', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '220c5589-f035-4097-8c0d-dfd565a9203a', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1796.525867] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Creating folder: Project (5cb4900a999e467bafdfd1fb407a82f4). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1796.526315] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d91f71d1-abd1-4a26-bef3-2ad0338d37b4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.537544] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Created folder: Project (5cb4900a999e467bafdfd1fb407a82f4) in parent group-v421118. [ 1796.537756] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Creating folder: Instances. Parent ref: group-v421194. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1796.538015] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fe728b2b-9df0-4e67-b518-1659a50ca93c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.546766] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Created folder: Instances in parent group-v421194. [ 1796.547346] env[62684]: DEBUG oslo.service.loopingcall [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1796.550100] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1796.550550] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4c1e3121-80bc-4a9d-bdb8-2241669c9e32 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.571327] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1796.571327] env[62684]: value = "task-2052520" [ 1796.571327] env[62684]: _type = "Task" [ 1796.571327] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.579803] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052520, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.653634] env[62684]: DEBUG oslo_vmware.api [None req-befbfa04-e100-450f-8304-b3734fea6600 tempest-ServersAdminNegativeTestJSON-2140285995 tempest-ServersAdminNegativeTestJSON-2140285995-project-admin] Task: {'id': task-2052517, 'name': SuspendVM_Task} progress is 58%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.830713] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4a8b82c-4056-46f3-b543-816fa98c3b14 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.843691] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26d56db7-b334-44e3-8f5a-2e2468dd3801 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.878822] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b19c87-5ee6-4a6d-bf5b-6d0ab4615797 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.889496] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8553e92-4fa9-46de-a55b-90e30991afcb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.908367] env[62684]: DEBUG nova.compute.provider_tree [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1796.911131] env[62684]: DEBUG oslo_concurrency.lockutils [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1796.971028] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bf46770a-b21d-4b06-b6f5-e0a39812542a tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Lock "a9dfeb4d-a92e-41cf-9d2f-43086cc9e868" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.583s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1797.081492] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052520, 'name': CreateVM_Task, 'duration_secs': 0.381402} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.081672] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1797.084021] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1797.084021] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1797.084021] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1797.084021] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2001541-6d81-41ac-988d-d99aac90bef7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.087956] env[62684]: DEBUG oslo_vmware.api [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 1797.087956] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523c82bf-ba32-5775-d0f9-344fe6113d13" [ 1797.087956] env[62684]: _type = "Task" [ 1797.087956] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.096492] env[62684]: DEBUG oslo_vmware.api [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523c82bf-ba32-5775-d0f9-344fe6113d13, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.153362] env[62684]: DEBUG oslo_vmware.api [None req-befbfa04-e100-450f-8304-b3734fea6600 tempest-ServersAdminNegativeTestJSON-2140285995 tempest-ServersAdminNegativeTestJSON-2140285995-project-admin] Task: {'id': task-2052517, 'name': SuspendVM_Task, 'duration_secs': 0.805517} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.153654] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-befbfa04-e100-450f-8304-b3734fea6600 tempest-ServersAdminNegativeTestJSON-2140285995 tempest-ServersAdminNegativeTestJSON-2140285995-project-admin] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Suspended the VM {{(pid=62684) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1797.153842] env[62684]: DEBUG nova.compute.manager [None req-befbfa04-e100-450f-8304-b3734fea6600 tempest-ServersAdminNegativeTestJSON-2140285995 tempest-ServersAdminNegativeTestJSON-2140285995-project-admin] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1797.154634] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ab9261-1ffb-42f2-ab72-2e58272ec096 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.415433] env[62684]: DEBUG nova.scheduler.client.report [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1797.473681] env[62684]: DEBUG nova.compute.manager [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1797.599825] env[62684]: DEBUG oslo_vmware.api [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523c82bf-ba32-5775-d0f9-344fe6113d13, 'name': SearchDatastore_Task, 'duration_secs': 0.041748} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.600168] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1797.600411] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1797.600648] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1797.600796] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1797.601165] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1797.601460] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ad4b67c2-4938-41b8-8a66-b5560639e111 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.610524] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1797.610741] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1797.611525] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8d47a09-f644-47fc-8405-6683bbdb618b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.619347] env[62684]: DEBUG oslo_vmware.api [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 1797.619347] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b398e5-d1b1-81e2-39d4-3a5e77c1ce0d" [ 1797.619347] env[62684]: _type = "Task" [ 1797.619347] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.628388] env[62684]: DEBUG oslo_vmware.api [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b398e5-d1b1-81e2-39d4-3a5e77c1ce0d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.922163] env[62684]: DEBUG oslo_concurrency.lockutils [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.740s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1797.923611] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.918s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1797.925365] env[62684]: INFO nova.compute.claims [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1797.943788] env[62684]: DEBUG nova.compute.manager [req-df6c19cb-6ef9-4622-88af-b9818d0bb98d req-516f33ad-21ea-47ab-956d-2166598cde23 service nova] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Received event network-changed-220c5589-f035-4097-8c0d-dfd565a9203a {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1797.944774] env[62684]: DEBUG nova.compute.manager [req-df6c19cb-6ef9-4622-88af-b9818d0bb98d req-516f33ad-21ea-47ab-956d-2166598cde23 service nova] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Refreshing instance network info cache due to event network-changed-220c5589-f035-4097-8c0d-dfd565a9203a. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1797.945340] env[62684]: DEBUG oslo_concurrency.lockutils [req-df6c19cb-6ef9-4622-88af-b9818d0bb98d req-516f33ad-21ea-47ab-956d-2166598cde23 service nova] Acquiring lock "refresh_cache-4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1797.945574] env[62684]: DEBUG oslo_concurrency.lockutils [req-df6c19cb-6ef9-4622-88af-b9818d0bb98d req-516f33ad-21ea-47ab-956d-2166598cde23 service nova] Acquired lock "refresh_cache-4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1797.945785] env[62684]: DEBUG nova.network.neutron [req-df6c19cb-6ef9-4622-88af-b9818d0bb98d req-516f33ad-21ea-47ab-956d-2166598cde23 service nova] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Refreshing network info cache for port 220c5589-f035-4097-8c0d-dfd565a9203a {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1797.997452] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.131580] env[62684]: DEBUG oslo_vmware.api [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b398e5-d1b1-81e2-39d4-3a5e77c1ce0d, 'name': SearchDatastore_Task, 'duration_secs': 0.013048} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.132401] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36d44ee7-6441-4422-a302-3787cb1d87c6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.137901] env[62684]: DEBUG oslo_vmware.api [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 1798.137901] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522b9ced-25d1-9d5a-1c6d-c02a76c146dd" [ 1798.137901] env[62684]: _type = "Task" [ 1798.137901] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.145311] env[62684]: DEBUG oslo_vmware.api [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522b9ced-25d1-9d5a-1c6d-c02a76c146dd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.430069] env[62684]: DEBUG oslo_concurrency.lockutils [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Acquiring lock "15592abb-fc85-47dc-882c-74565d4b5fb7" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.430412] env[62684]: DEBUG oslo_concurrency.lockutils [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Lock "15592abb-fc85-47dc-882c-74565d4b5fb7" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.650613] env[62684]: DEBUG oslo_vmware.api [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522b9ced-25d1-9d5a-1c6d-c02a76c146dd, 'name': SearchDatastore_Task, 'duration_secs': 0.021766} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.650890] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1798.651190] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978/4e5152b0-7bac-4dc2-b6c7-6590fa2d5978.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1798.652557] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9af9b818-ae48-42c1-89f4-5da6999d444e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.658787] env[62684]: DEBUG oslo_vmware.api [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 1798.658787] env[62684]: value = "task-2052521" [ 1798.658787] env[62684]: _type = "Task" [ 1798.658787] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.667707] env[62684]: DEBUG oslo_vmware.api [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052521, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.731311] env[62684]: DEBUG nova.network.neutron [req-df6c19cb-6ef9-4622-88af-b9818d0bb98d req-516f33ad-21ea-47ab-956d-2166598cde23 service nova] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Updated VIF entry in instance network info cache for port 220c5589-f035-4097-8c0d-dfd565a9203a. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1798.731770] env[62684]: DEBUG nova.network.neutron [req-df6c19cb-6ef9-4622-88af-b9818d0bb98d req-516f33ad-21ea-47ab-956d-2166598cde23 service nova] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Updating instance_info_cache with network_info: [{"id": "220c5589-f035-4097-8c0d-dfd565a9203a", "address": "fa:16:3e:3e:7c:34", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap220c5589-f0", "ovs_interfaceid": "220c5589-f035-4097-8c0d-dfd565a9203a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1798.934855] env[62684]: DEBUG oslo_concurrency.lockutils [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Lock "15592abb-fc85-47dc-882c-74565d4b5fb7" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.504s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.935639] env[62684]: DEBUG nova.compute.manager [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1799.180251] env[62684]: DEBUG oslo_vmware.api [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052521, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.234602] env[62684]: DEBUG oslo_concurrency.lockutils [req-df6c19cb-6ef9-4622-88af-b9818d0bb98d req-516f33ad-21ea-47ab-956d-2166598cde23 service nova] Releasing lock "refresh_cache-4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1799.235231] env[62684]: DEBUG nova.compute.manager [req-df6c19cb-6ef9-4622-88af-b9818d0bb98d req-516f33ad-21ea-47ab-956d-2166598cde23 service nova] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Received event network-changed-f30c0c93-502e-48a6-b8f1-c44350487322 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1799.235231] env[62684]: DEBUG nova.compute.manager [req-df6c19cb-6ef9-4622-88af-b9818d0bb98d req-516f33ad-21ea-47ab-956d-2166598cde23 service nova] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Refreshing instance network info cache due to event network-changed-f30c0c93-502e-48a6-b8f1-c44350487322. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1799.235372] env[62684]: DEBUG oslo_concurrency.lockutils [req-df6c19cb-6ef9-4622-88af-b9818d0bb98d req-516f33ad-21ea-47ab-956d-2166598cde23 service nova] Acquiring lock "refresh_cache-17d30180-9770-4329-a6d8-757a93514a96" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1799.235498] env[62684]: DEBUG oslo_concurrency.lockutils [req-df6c19cb-6ef9-4622-88af-b9818d0bb98d req-516f33ad-21ea-47ab-956d-2166598cde23 service nova] Acquired lock "refresh_cache-17d30180-9770-4329-a6d8-757a93514a96" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1799.235667] env[62684]: DEBUG nova.network.neutron [req-df6c19cb-6ef9-4622-88af-b9818d0bb98d req-516f33ad-21ea-47ab-956d-2166598cde23 service nova] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Refreshing network info cache for port f30c0c93-502e-48a6-b8f1-c44350487322 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1799.443539] env[62684]: DEBUG nova.compute.utils [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1799.444865] env[62684]: DEBUG nova.compute.manager [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1799.446250] env[62684]: DEBUG nova.network.neutron [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1799.510062] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d359a33f-2681-43f5-9261-06c04cadc5e5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.519086] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc62dba-09cf-429b-be25-0087d9b72c04 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.554716] env[62684]: DEBUG nova.policy [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af91f968b03341a789b6f2cd03d2fb14', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53e7a6c764124be0993623bea3a56561', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1799.557040] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd65cc9-b363-4438-8260-cfa3d4108e95 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.560347] env[62684]: DEBUG nova.compute.manager [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1799.561193] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58acc0d7-1b1b-4025-a010-ebc57674a1e3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.571653] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29b4114d-33eb-4681-80b0-84d610369cd6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.589320] env[62684]: DEBUG nova.compute.provider_tree [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1799.672627] env[62684]: DEBUG oslo_vmware.api [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052521, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.635789} completed 
successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.672857] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978/4e5152b0-7bac-4dc2-b6c7-6590fa2d5978.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1799.673119] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1799.673490] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-23cd627b-8900-406b-b205-4090b20b7582 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.679979] env[62684]: DEBUG oslo_vmware.api [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 1799.679979] env[62684]: value = "task-2052522" [ 1799.679979] env[62684]: _type = "Task" [ 1799.679979] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.687982] env[62684]: DEBUG oslo_vmware.api [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052522, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.950261] env[62684]: DEBUG nova.compute.manager [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1800.082750] env[62684]: DEBUG nova.network.neutron [req-df6c19cb-6ef9-4622-88af-b9818d0bb98d req-516f33ad-21ea-47ab-956d-2166598cde23 service nova] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Updated VIF entry in instance network info cache for port f30c0c93-502e-48a6-b8f1-c44350487322. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1800.083209] env[62684]: DEBUG nova.network.neutron [req-df6c19cb-6ef9-4622-88af-b9818d0bb98d req-516f33ad-21ea-47ab-956d-2166598cde23 service nova] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Updating instance_info_cache with network_info: [{"id": "f30c0c93-502e-48a6-b8f1-c44350487322", "address": "fa:16:3e:e2:b9:d4", "network": {"id": "4dbf6e66-d1d2-4a75-95f1-83ee056bb69a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1488549197-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a997a73719f4eb5b3e35640f6c9f57f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d377d75-3add-4a15-8691-74b2eb010924", "external-id": "nsx-vlan-transportzone-71", "segmentation_id": 71, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf30c0c93-50", "ovs_interfaceid": "f30c0c93-502e-48a6-b8f1-c44350487322", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1800.085136] env[62684]: INFO nova.compute.manager [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] instance snapshotting [ 1800.089324] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28fbd574-8425-4999-be9c-99467b1ef495 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.092570] env[62684]: DEBUG nova.scheduler.client.report [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1800.112082] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4198fd3-3253-46f3-9b01-967860358bb4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.189517] env[62684]: DEBUG oslo_vmware.api [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052522, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063402} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.189853] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1800.190635] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed56a713-da11-4e2e-940d-17b97ff56a7a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.216827] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Reconfiguring VM instance instance-00000019 to attach disk [datastore2] 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978/4e5152b0-7bac-4dc2-b6c7-6590fa2d5978.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1800.216827] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-def15e52-72fe-4c1a-850a-e62abeb5d489 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.247562] env[62684]: DEBUG oslo_vmware.api [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 1800.247562] env[62684]: value = "task-2052523" [ 1800.247562] env[62684]: _type = "Task" [ 1800.247562] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.258960] env[62684]: DEBUG oslo_vmware.api [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052523, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.277484] env[62684]: DEBUG nova.network.neutron [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Successfully created port: b474f9ed-1ee0-4186-9dd7-336689da4726 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1800.450721] env[62684]: DEBUG nova.compute.manager [req-1c96f4cc-f9b3-44d4-a631-77d371e227fc req-ff902eb5-528a-4240-a4a6-35f7e95e25cf service nova] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Received event network-changed-f30c0c93-502e-48a6-b8f1-c44350487322 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1800.451232] env[62684]: DEBUG nova.compute.manager [req-1c96f4cc-f9b3-44d4-a631-77d371e227fc req-ff902eb5-528a-4240-a4a6-35f7e95e25cf service nova] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Refreshing instance network info cache due to event network-changed-f30c0c93-502e-48a6-b8f1-c44350487322. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1800.451232] env[62684]: DEBUG oslo_concurrency.lockutils [req-1c96f4cc-f9b3-44d4-a631-77d371e227fc req-ff902eb5-528a-4240-a4a6-35f7e95e25cf service nova] Acquiring lock "refresh_cache-17d30180-9770-4329-a6d8-757a93514a96" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1800.587968] env[62684]: DEBUG oslo_concurrency.lockutils [req-df6c19cb-6ef9-4622-88af-b9818d0bb98d req-516f33ad-21ea-47ab-956d-2166598cde23 service nova] Releasing lock "refresh_cache-17d30180-9770-4329-a6d8-757a93514a96" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1800.588449] env[62684]: DEBUG oslo_concurrency.lockutils [req-1c96f4cc-f9b3-44d4-a631-77d371e227fc req-ff902eb5-528a-4240-a4a6-35f7e95e25cf service nova] Acquired lock "refresh_cache-17d30180-9770-4329-a6d8-757a93514a96" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1800.588786] env[62684]: DEBUG nova.network.neutron [req-1c96f4cc-f9b3-44d4-a631-77d371e227fc req-ff902eb5-528a-4240-a4a6-35f7e95e25cf service nova] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Refreshing network info cache for port f30c0c93-502e-48a6-b8f1-c44350487322 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1800.597715] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.674s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.598255] env[62684]: DEBUG nova.compute.manager [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1800.601820] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.999s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.603215] env[62684]: INFO nova.compute.claims [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1800.622966] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Creating Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1800.623223] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-71e53ce8-65bb-4352-a4de-7230567a30f3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.632556] env[62684]: DEBUG oslo_vmware.api [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1800.632556] env[62684]: value = "task-2052524" [ 1800.632556] env[62684]: _type = "Task" [ 1800.632556] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.643179] env[62684]: DEBUG oslo_vmware.api [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052524, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.757676] env[62684]: DEBUG oslo_vmware.api [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052523, 'name': ReconfigVM_Task, 'duration_secs': 0.305291} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.758696] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Reconfigured VM instance instance-00000019 to attach disk [datastore2] 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978/4e5152b0-7bac-4dc2-b6c7-6590fa2d5978.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1800.759586] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f60fb632-dd4a-47aa-a81b-70923176d0b1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.765692] env[62684]: DEBUG oslo_vmware.api [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 1800.765692] env[62684]: value = "task-2052525" [ 1800.765692] env[62684]: _type = "Task" [ 1800.765692] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.774443] env[62684]: DEBUG oslo_vmware.api [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052525, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.960476] env[62684]: DEBUG nova.compute.manager [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1801.002566] env[62684]: DEBUG nova.virt.hardware [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1801.002566] env[62684]: DEBUG nova.virt.hardware [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1801.002713] env[62684]: DEBUG nova.virt.hardware [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1801.004384] env[62684]: DEBUG nova.virt.hardware [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1801.004384] env[62684]: DEBUG nova.virt.hardware [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1801.004384] env[62684]: DEBUG nova.virt.hardware [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1801.004384] env[62684]: DEBUG nova.virt.hardware [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1801.004568] env[62684]: DEBUG nova.virt.hardware [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1801.004603] env[62684]: DEBUG nova.virt.hardware [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 
tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1801.004743] env[62684]: DEBUG nova.virt.hardware [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1801.004917] env[62684]: DEBUG nova.virt.hardware [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1801.005842] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2db11a8-3d8a-4693-a174-810d85bc93c8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.017522] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3120ef87-1bcb-48c9-b5bd-ef733feb4c44 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.104226] env[62684]: DEBUG nova.compute.utils [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1801.105136] env[62684]: DEBUG nova.compute.manager [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1801.105220] env[62684]: DEBUG nova.network.neutron [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1801.148176] env[62684]: DEBUG oslo_vmware.api [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052524, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.153296] env[62684]: DEBUG nova.policy [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '216bd380a88d46debded7e1a6b59734c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '41ab694b1d944a9fb8206fa6e4461e78', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1801.276633] env[62684]: DEBUG oslo_vmware.api [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052525, 'name': Rename_Task, 'duration_secs': 0.13478} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.276833] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1801.277530] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-01164600-2708-4453-9b65-4017fa859a92 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.286085] env[62684]: DEBUG oslo_vmware.api [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 1801.286085] env[62684]: value = "task-2052526" [ 1801.286085] env[62684]: _type = "Task" [ 1801.286085] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.295593] env[62684]: DEBUG oslo_vmware.api [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052526, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.611978] env[62684]: DEBUG nova.compute.manager [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1801.650184] env[62684]: DEBUG oslo_vmware.api [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052524, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.658667] env[62684]: DEBUG nova.network.neutron [req-1c96f4cc-f9b3-44d4-a631-77d371e227fc req-ff902eb5-528a-4240-a4a6-35f7e95e25cf service nova] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Updated VIF entry in instance network info cache for port f30c0c93-502e-48a6-b8f1-c44350487322. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1801.658667] env[62684]: DEBUG nova.network.neutron [req-1c96f4cc-f9b3-44d4-a631-77d371e227fc req-ff902eb5-528a-4240-a4a6-35f7e95e25cf service nova] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Updating instance_info_cache with network_info: [{"id": "f30c0c93-502e-48a6-b8f1-c44350487322", "address": "fa:16:3e:e2:b9:d4", "network": {"id": "4dbf6e66-d1d2-4a75-95f1-83ee056bb69a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1488549197-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a997a73719f4eb5b3e35640f6c9f57f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d377d75-3add-4a15-8691-74b2eb010924", "external-id": "nsx-vlan-transportzone-71", "segmentation_id": 71, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf30c0c93-50", "ovs_interfaceid": "f30c0c93-502e-48a6-b8f1-c44350487322", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1801.795699] env[62684]: DEBUG nova.network.neutron [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Successfully created port: 04643826-a651-4eba-be4f-57825aa4f302 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1801.801395] env[62684]: DEBUG oslo_vmware.api [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052526, 'name': PowerOnVM_Task, 'duration_secs': 0.471934} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.803718] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1801.803937] env[62684]: INFO nova.compute.manager [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Took 8.98 seconds to spawn the instance on the hypervisor. 
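[annotation] The records above trace the spawn of instance 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978: the cached image vmdk is located via SearchDatastore_Task, copied with CopyVirtualDisk_Task, the root disk is extended, the VM is reconfigured and renamed, and finally powered on, with each vCenter task driven through oslo.vmware's polling loop (the repeated "Waiting for the task" / "progress is N%" / "completed successfully" lines). Below is a minimal sketch of that invoke-then-poll pattern, not Nova's actual code path, using a placeholder vCenter endpoint, placeholder credentials, and a hypothetical managed-object reference:

    from oslo_vmware import api, vim_util

    # Placeholder endpoint and credentials; task_poll_interval controls how often
    # _poll_task logs the "progress is N%" entries seen above.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Start a vCenter task (here: power on a VM identified by a hypothetical moref) ...
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # ... then block until vCenter reports success or failure; wait_for_task is what
    # produces the "Waiting for the task" and "completed successfully" entries.
    session.wait_for_task(task)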
[ 1801.804155] env[62684]: DEBUG nova.compute.manager [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1801.805382] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e08fd16c-02d2-4719-97a8-8e078c3179fd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.151490] env[62684]: DEBUG oslo_vmware.api [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052524, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.163948] env[62684]: DEBUG oslo_concurrency.lockutils [req-1c96f4cc-f9b3-44d4-a631-77d371e227fc req-ff902eb5-528a-4240-a4a6-35f7e95e25cf service nova] Releasing lock "refresh_cache-17d30180-9770-4329-a6d8-757a93514a96" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1802.259284] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1eb18e5-3eda-4539-ac39-063ae83dbe32 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.269967] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d0927d-ec3d-4ff3-aa11-a14d624e8f63 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.309904] env[62684]: DEBUG nova.network.neutron [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Successfully updated port: b474f9ed-1ee0-4186-9dd7-336689da4726 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1802.311615] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fd2e76b-7a32-4bc6-8306-3275f0c9a8f4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.323691] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bba8103a-0464-4314-a45e-6dc620613d53 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.331127] env[62684]: INFO nova.compute.manager [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Took 36.50 seconds to build instance. 
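[annotation] Throughout this section the compute workers serialize on named oslo.concurrency locks ("compute_resources", the per-image "devstack-image-cache_base/…" paths, "refresh_cache-<uuid>"), and lockutils reports how long each caller waited for the lock and how long it was held. A minimal sketch, not the Nova implementation, of the two usage styles that produce those "Acquiring lock" / "acquired … waited" / "released … held" entries:

    from oslo_concurrency import lockutils

    # Decorator form: one claim at a time per worker process under the
    # "compute_resources" semaphore, matching ResourceTracker.instance_claim above.
    @lockutils.synchronized('compute_resources')
    def claim_sketch(instance_uuid):
        return 'claimed %s' % instance_uuid   # placeholder critical section

    # Context-manager form, as used around the network info cache refreshes.
    def refresh_cache_sketch(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # refresh and store the instance's network_info here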
[ 1802.341590] env[62684]: DEBUG nova.compute.provider_tree [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1802.347173] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1802.347439] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1802.616705] env[62684]: DEBUG nova.compute.manager [req-dc2f4d7b-52cc-4b96-a858-1c3e5a8fe2a6 req-efd33637-c0e7-4c0f-8b77-91f6f2ff75fb service nova] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Received event network-vif-plugged-b474f9ed-1ee0-4186-9dd7-336689da4726 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1802.616932] env[62684]: DEBUG oslo_concurrency.lockutils [req-dc2f4d7b-52cc-4b96-a858-1c3e5a8fe2a6 req-efd33637-c0e7-4c0f-8b77-91f6f2ff75fb service nova] Acquiring lock "8c046991-b294-4f33-9fce-a241984d66d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1802.617150] env[62684]: DEBUG oslo_concurrency.lockutils [req-dc2f4d7b-52cc-4b96-a858-1c3e5a8fe2a6 req-efd33637-c0e7-4c0f-8b77-91f6f2ff75fb service nova] Lock "8c046991-b294-4f33-9fce-a241984d66d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1802.617320] env[62684]: DEBUG oslo_concurrency.lockutils [req-dc2f4d7b-52cc-4b96-a858-1c3e5a8fe2a6 req-efd33637-c0e7-4c0f-8b77-91f6f2ff75fb service nova] Lock "8c046991-b294-4f33-9fce-a241984d66d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1802.617675] env[62684]: DEBUG nova.compute.manager [req-dc2f4d7b-52cc-4b96-a858-1c3e5a8fe2a6 req-efd33637-c0e7-4c0f-8b77-91f6f2ff75fb service nova] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] No waiting events found dispatching network-vif-plugged-b474f9ed-1ee0-4186-9dd7-336689da4726 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1802.617675] env[62684]: WARNING nova.compute.manager [req-dc2f4d7b-52cc-4b96-a858-1c3e5a8fe2a6 req-efd33637-c0e7-4c0f-8b77-91f6f2ff75fb service nova] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Received unexpected event network-vif-plugged-b474f9ed-1ee0-4186-9dd7-336689da4726 for instance with 
vm_state building and task_state spawning. [ 1802.617802] env[62684]: DEBUG nova.compute.manager [req-dc2f4d7b-52cc-4b96-a858-1c3e5a8fe2a6 req-efd33637-c0e7-4c0f-8b77-91f6f2ff75fb service nova] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Received event network-changed-b474f9ed-1ee0-4186-9dd7-336689da4726 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1802.617954] env[62684]: DEBUG nova.compute.manager [req-dc2f4d7b-52cc-4b96-a858-1c3e5a8fe2a6 req-efd33637-c0e7-4c0f-8b77-91f6f2ff75fb service nova] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Refreshing instance network info cache due to event network-changed-b474f9ed-1ee0-4186-9dd7-336689da4726. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1802.619439] env[62684]: DEBUG oslo_concurrency.lockutils [req-dc2f4d7b-52cc-4b96-a858-1c3e5a8fe2a6 req-efd33637-c0e7-4c0f-8b77-91f6f2ff75fb service nova] Acquiring lock "refresh_cache-8c046991-b294-4f33-9fce-a241984d66d7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1802.619687] env[62684]: DEBUG oslo_concurrency.lockutils [req-dc2f4d7b-52cc-4b96-a858-1c3e5a8fe2a6 req-efd33637-c0e7-4c0f-8b77-91f6f2ff75fb service nova] Acquired lock "refresh_cache-8c046991-b294-4f33-9fce-a241984d66d7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1802.620106] env[62684]: DEBUG nova.network.neutron [req-dc2f4d7b-52cc-4b96-a858-1c3e5a8fe2a6 req-efd33637-c0e7-4c0f-8b77-91f6f2ff75fb service nova] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Refreshing network info cache for port b474f9ed-1ee0-4186-9dd7-336689da4726 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1802.630983] env[62684]: DEBUG nova.compute.manager [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1802.648253] env[62684]: DEBUG oslo_vmware.api [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052524, 'name': CreateSnapshot_Task, 'duration_secs': 1.545504} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.650602] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Created Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1802.651933] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45162363-307b-409f-bb94-c0051dcb89b9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.658098] env[62684]: DEBUG nova.virt.hardware [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1802.658098] env[62684]: DEBUG nova.virt.hardware [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1802.658098] env[62684]: DEBUG nova.virt.hardware [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1802.658098] env[62684]: DEBUG nova.virt.hardware [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1802.658412] env[62684]: DEBUG nova.virt.hardware [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1802.658412] env[62684]: DEBUG nova.virt.hardware [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1802.659034] env[62684]: DEBUG nova.virt.hardware [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1802.659034] env[62684]: DEBUG nova.virt.hardware [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1802.659034] env[62684]: DEBUG nova.virt.hardware [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1802.659301] env[62684]: DEBUG nova.virt.hardware [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1802.659548] env[62684]: DEBUG nova.virt.hardware [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1802.660583] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a00de2d9-8ea1-4cd7-8482-9687e2cf7498 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.675720] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90b870bf-03f5-4001-b805-9b6691010927 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.729248] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Acquiring lock "17d30180-9770-4329-a6d8-757a93514a96" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1802.729545] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Lock "17d30180-9770-4329-a6d8-757a93514a96" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1802.729758] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Acquiring lock "17d30180-9770-4329-a6d8-757a93514a96-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1802.729942] env[62684]: DEBUG 
oslo_concurrency.lockutils [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Lock "17d30180-9770-4329-a6d8-757a93514a96-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1802.730132] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Lock "17d30180-9770-4329-a6d8-757a93514a96-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1802.732614] env[62684]: INFO nova.compute.manager [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Terminating instance [ 1802.734942] env[62684]: DEBUG nova.compute.manager [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1802.735129] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1802.735987] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d06131-76ba-4458-a71f-b654818c584c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.744092] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1802.744338] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bdc6c407-8f37-40e2-b257-ca2e7d349c70 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.750988] env[62684]: DEBUG oslo_vmware.api [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Waiting for the task: (returnval){ [ 1802.750988] env[62684]: value = "task-2052527" [ 1802.750988] env[62684]: _type = "Task" [ 1802.750988] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.761942] env[62684]: DEBUG oslo_vmware.api [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Task: {'id': task-2052527, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.815472] env[62684]: DEBUG oslo_concurrency.lockutils [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Acquiring lock "refresh_cache-8c046991-b294-4f33-9fce-a241984d66d7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1802.834278] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4917d141-84bb-4ea9-8e61-302619c36323 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.905s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1802.849137] env[62684]: DEBUG nova.scheduler.client.report [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1803.162268] env[62684]: DEBUG nova.network.neutron [req-dc2f4d7b-52cc-4b96-a858-1c3e5a8fe2a6 req-efd33637-c0e7-4c0f-8b77-91f6f2ff75fb service nova] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1803.186780] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Creating linked-clone VM from snapshot {{(pid=62684) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1803.187317] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-db43a020-3a86-4496-8922-95da93aee784 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.197157] env[62684]: DEBUG oslo_vmware.api [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1803.197157] env[62684]: value = "task-2052528" [ 1803.197157] env[62684]: _type = "Task" [ 1803.197157] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1803.207418] env[62684]: DEBUG oslo_vmware.api [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052528, 'name': CloneVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.261344] env[62684]: DEBUG oslo_vmware.api [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Task: {'id': task-2052527, 'name': PowerOffVM_Task, 'duration_secs': 0.263076} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1803.261344] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1803.262197] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1803.262651] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cb81e578-c8bc-4eb5-9e99-7cc5fb3289dc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.332767] env[62684]: DEBUG nova.network.neutron [req-dc2f4d7b-52cc-4b96-a858-1c3e5a8fe2a6 req-efd33637-c0e7-4c0f-8b77-91f6f2ff75fb service nova] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1803.337422] env[62684]: DEBUG nova.compute.manager [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1803.361377] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.759s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.361918] env[62684]: DEBUG nova.compute.manager [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1803.365077] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.533s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1803.365348] env[62684]: DEBUG nova.objects.instance [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Lazy-loading 'resources' on Instance uuid aec16a15-5d75-4ea6-800b-1bf67f762d89 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1803.385219] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5304a8c6-4bc7-4303-9b48-aa2fdb45bd63 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Acquiring lock "ca22ca59-1b60-46f0-ae83-03ed4002fa0d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1803.385569] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5304a8c6-4bc7-4303-9b48-aa2fdb45bd63 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Lock "ca22ca59-1b60-46f0-ae83-03ed4002fa0d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1803.385785] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5304a8c6-4bc7-4303-9b48-aa2fdb45bd63 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Acquiring lock "ca22ca59-1b60-46f0-ae83-03ed4002fa0d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1803.386696] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5304a8c6-4bc7-4303-9b48-aa2fdb45bd63 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Lock "ca22ca59-1b60-46f0-ae83-03ed4002fa0d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1803.386964] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5304a8c6-4bc7-4303-9b48-aa2fdb45bd63 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Lock "ca22ca59-1b60-46f0-ae83-03ed4002fa0d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.390326] env[62684]: DEBUG nova.compute.manager [req-ef619d2c-1b01-4ac6-9a34-5901cc9a447e req-ea8f507b-b0cb-4eb6-af25-6472e8004007 service nova] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Received event network-vif-plugged-04643826-a651-4eba-be4f-57825aa4f302 {{(pid=62684) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11132}} [ 1803.390520] env[62684]: DEBUG oslo_concurrency.lockutils [req-ef619d2c-1b01-4ac6-9a34-5901cc9a447e req-ea8f507b-b0cb-4eb6-af25-6472e8004007 service nova] Acquiring lock "f44b2e88-af6d-4252-b562-9d5fa7745b56-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1803.390713] env[62684]: DEBUG oslo_concurrency.lockutils [req-ef619d2c-1b01-4ac6-9a34-5901cc9a447e req-ea8f507b-b0cb-4eb6-af25-6472e8004007 service nova] Lock "f44b2e88-af6d-4252-b562-9d5fa7745b56-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1803.390875] env[62684]: DEBUG oslo_concurrency.lockutils [req-ef619d2c-1b01-4ac6-9a34-5901cc9a447e req-ea8f507b-b0cb-4eb6-af25-6472e8004007 service nova] Lock "f44b2e88-af6d-4252-b562-9d5fa7745b56-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.391097] env[62684]: DEBUG nova.compute.manager [req-ef619d2c-1b01-4ac6-9a34-5901cc9a447e req-ea8f507b-b0cb-4eb6-af25-6472e8004007 service nova] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] No waiting events found dispatching network-vif-plugged-04643826-a651-4eba-be4f-57825aa4f302 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1803.391255] env[62684]: WARNING nova.compute.manager [req-ef619d2c-1b01-4ac6-9a34-5901cc9a447e req-ea8f507b-b0cb-4eb6-af25-6472e8004007 service nova] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Received unexpected event network-vif-plugged-04643826-a651-4eba-be4f-57825aa4f302 for instance with vm_state building and task_state spawning. [ 1803.393666] env[62684]: INFO nova.compute.manager [None req-5304a8c6-4bc7-4303-9b48-aa2fdb45bd63 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Terminating instance [ 1803.395864] env[62684]: DEBUG nova.compute.manager [None req-5304a8c6-4bc7-4303-9b48-aa2fdb45bd63 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1803.396100] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5304a8c6-4bc7-4303-9b48-aa2fdb45bd63 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1803.396961] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8216e868-c3a8-4799-8670-4cedd1cbdd02 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.406127] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5304a8c6-4bc7-4303-9b48-aa2fdb45bd63 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1803.406964] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e66a9df-1b6e-4b36-a668-3e0d7f15cc7a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.460964] env[62684]: DEBUG nova.network.neutron [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Successfully updated port: 04643826-a651-4eba-be4f-57825aa4f302 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1803.601408] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1803.601655] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1803.601849] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Deleting the datastore file [datastore1] 17d30180-9770-4329-a6d8-757a93514a96 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1803.602154] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-086d224c-8663-4a18-9f32-4f00d399321c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.607254] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5304a8c6-4bc7-4303-9b48-aa2fdb45bd63 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Unregistered the VM {{(pid=62684) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1803.607254] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5304a8c6-4bc7-4303-9b48-aa2fdb45bd63 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1803.607254] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-5304a8c6-4bc7-4303-9b48-aa2fdb45bd63 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Deleting the datastore file [datastore1] ca22ca59-1b60-46f0-ae83-03ed4002fa0d {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1803.607887] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-18e3ed08-a536-4791-8056-99195911f15e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.612339] env[62684]: DEBUG oslo_vmware.api [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Waiting for the task: (returnval){ [ 1803.612339] env[62684]: value = "task-2052531" [ 1803.612339] env[62684]: _type = "Task" [ 1803.612339] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1803.617876] env[62684]: DEBUG oslo_vmware.api [None req-5304a8c6-4bc7-4303-9b48-aa2fdb45bd63 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Waiting for the task: (returnval){ [ 1803.617876] env[62684]: value = "task-2052532" [ 1803.617876] env[62684]: _type = "Task" [ 1803.617876] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1803.626827] env[62684]: DEBUG oslo_vmware.api [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Task: {'id': task-2052531, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.636720] env[62684]: DEBUG oslo_vmware.api [None req-5304a8c6-4bc7-4303-9b48-aa2fdb45bd63 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052532, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.708273] env[62684]: DEBUG oslo_vmware.api [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052528, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.836067] env[62684]: DEBUG oslo_concurrency.lockutils [req-dc2f4d7b-52cc-4b96-a858-1c3e5a8fe2a6 req-efd33637-c0e7-4c0f-8b77-91f6f2ff75fb service nova] Releasing lock "refresh_cache-8c046991-b294-4f33-9fce-a241984d66d7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1803.836352] env[62684]: DEBUG oslo_concurrency.lockutils [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Acquired lock "refresh_cache-8c046991-b294-4f33-9fce-a241984d66d7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1803.836515] env[62684]: DEBUG nova.network.neutron [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1803.870555] env[62684]: DEBUG nova.compute.utils [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1803.876173] env[62684]: DEBUG nova.compute.manager [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1803.876392] env[62684]: DEBUG nova.network.neutron [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1803.879526] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1803.928479] env[62684]: DEBUG nova.policy [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '20643b7ff50e4defac3ebb2176cc5cf2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5513297908414207afa484e075bd26e8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1803.963513] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 
tempest-ServersTestJSON-1246500318-project-member] Acquiring lock "refresh_cache-f44b2e88-af6d-4252-b562-9d5fa7745b56" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1803.963661] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Acquired lock "refresh_cache-f44b2e88-af6d-4252-b562-9d5fa7745b56" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1803.963811] env[62684]: DEBUG nova.network.neutron [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1804.130293] env[62684]: DEBUG oslo_vmware.api [None req-5304a8c6-4bc7-4303-9b48-aa2fdb45bd63 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052532, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.207513} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.130596] env[62684]: DEBUG oslo_vmware.api [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Task: {'id': task-2052531, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198089} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.133217] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-5304a8c6-4bc7-4303-9b48-aa2fdb45bd63 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1804.133490] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5304a8c6-4bc7-4303-9b48-aa2fdb45bd63 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1804.133738] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5304a8c6-4bc7-4303-9b48-aa2fdb45bd63 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1804.133974] env[62684]: INFO nova.compute.manager [None req-5304a8c6-4bc7-4303-9b48-aa2fdb45bd63 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Took 0.74 seconds to destroy the instance on the hypervisor. 
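The surrounding records trace the driver-side teardown of instances 17d30180-9770-4329-a6d8-757a93514a96 and ca22ca59-1b60-46f0-ae83-03ed4002fa0d: power off, unregister the VM, then delete its directory from datastore1. A compact sketch of that sequence using the same invoke/wait pattern as above; the helper name, ds_path and datacenter_ref are placeholders, and this is not Nova's vmops/ds_util code.

    def destroy_vm(session, vm_ref, datacenter_ref, ds_path):
        """Power off, unregister and delete a VM's files, as logged above."""
        # PowerOffVM_Task: stop the guest before unregistering it.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)

        # UnregisterVM removes the VM from the vCenter inventory but leaves
        # its files on the datastore; it is not a task, so nothing is polled.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

        # DeleteDatastoreFile_Task removes the instance directory itself,
        # e.g. ds_path = '[datastore1] <instance-uuid>' (placeholder).
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path,
                                  datacenter=datacenter_ref)
        session.wait_for_task(task)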
[ 1804.134317] env[62684]: DEBUG oslo.service.loopingcall [None req-5304a8c6-4bc7-4303-9b48-aa2fdb45bd63 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1804.134571] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1804.134796] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1804.135019] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1804.135309] env[62684]: INFO nova.compute.manager [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Took 1.40 seconds to destroy the instance on the hypervisor. [ 1804.135997] env[62684]: DEBUG oslo.service.loopingcall [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1804.136341] env[62684]: DEBUG nova.compute.manager [-] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1804.136506] env[62684]: DEBUG nova.network.neutron [-] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1804.138084] env[62684]: DEBUG nova.compute.manager [-] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1804.138177] env[62684]: DEBUG nova.network.neutron [-] [instance: 17d30180-9770-4329-a6d8-757a93514a96] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1804.208062] env[62684]: DEBUG oslo_vmware.api [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052528, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.306986] env[62684]: DEBUG nova.network.neutron [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Successfully created port: 140f3118-d3fd-4be3-a661-89c3cac2cb26 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1804.379603] env[62684]: DEBUG nova.compute.manager [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1804.450789] env[62684]: DEBUG nova.network.neutron [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1804.487152] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0acf0c6-f7c6-432d-be8b-a85625f4e4af {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.495765] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c9cc0f3-d6ed-407d-b0fd-0680f4c4096d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.532265] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2d7557b-9d07-46ee-baef-f5e267bba670 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.540996] env[62684]: DEBUG nova.network.neutron [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1804.545582] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b700cde9-f6d2-4df8-81d2-8007d02e650b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.563448] env[62684]: DEBUG nova.compute.provider_tree [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1804.711745] env[62684]: DEBUG oslo_vmware.api [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052528, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.833747] env[62684]: DEBUG nova.network.neutron [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Updating instance_info_cache with network_info: [{"id": "04643826-a651-4eba-be4f-57825aa4f302", "address": "fa:16:3e:c9:b1:61", "network": {"id": "09056ab7-a8ec-4de0-bd66-9437b6d91612", "bridge": "br-int", "label": "tempest-ServersTestJSON-198717452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41ab694b1d944a9fb8206fa6e4461e78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04643826-a6", "ovs_interfaceid": "04643826-a651-4eba-be4f-57825aa4f302", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1804.867018] env[62684]: DEBUG nova.network.neutron [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Updating instance_info_cache with network_info: [{"id": "b474f9ed-1ee0-4186-9dd7-336689da4726", "address": "fa:16:3e:a0:06:f4", "network": {"id": "69e30a6f-44ce-4db9-8a6c-6fe6a85d2f08", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-359372498-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53e7a6c764124be0993623bea3a56561", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "21310d90-efbc-45a8-a97f-c4358606530f", "external-id": "nsx-vlan-transportzone-672", "segmentation_id": 672, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb474f9ed-1e", "ovs_interfaceid": "b474f9ed-1ee0-4186-9dd7-336689da4726", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1805.070406] env[62684]: DEBUG nova.scheduler.client.report [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1805.098321] env[62684]: DEBUG nova.compute.manager [req-9467f7f8-012d-41a8-8d38-11f6b3b6e2bd req-f3b29062-9120-4bd1-a707-4203167c2d7e service nova] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Received event network-changed-220c5589-f035-4097-8c0d-dfd565a9203a {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1805.098511] env[62684]: DEBUG nova.compute.manager [req-9467f7f8-012d-41a8-8d38-11f6b3b6e2bd req-f3b29062-9120-4bd1-a707-4203167c2d7e service nova] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Refreshing instance network info cache due to event network-changed-220c5589-f035-4097-8c0d-dfd565a9203a. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1805.098728] env[62684]: DEBUG oslo_concurrency.lockutils [req-9467f7f8-012d-41a8-8d38-11f6b3b6e2bd req-f3b29062-9120-4bd1-a707-4203167c2d7e service nova] Acquiring lock "refresh_cache-4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1805.098868] env[62684]: DEBUG oslo_concurrency.lockutils [req-9467f7f8-012d-41a8-8d38-11f6b3b6e2bd req-f3b29062-9120-4bd1-a707-4203167c2d7e service nova] Acquired lock "refresh_cache-4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1805.099246] env[62684]: DEBUG nova.network.neutron [req-9467f7f8-012d-41a8-8d38-11f6b3b6e2bd req-f3b29062-9120-4bd1-a707-4203167c2d7e service nova] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Refreshing network info cache for port 220c5589-f035-4097-8c0d-dfd565a9203a {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1805.209597] env[62684]: DEBUG oslo_vmware.api [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052528, 'name': CloneVM_Task, 'duration_secs': 1.742738} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.209815] env[62684]: INFO nova.virt.vmwareapi.vmops [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Created linked-clone VM from snapshot [ 1805.210582] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5726150-58c7-43d8-b39e-a015d8f5b3d9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.220454] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Uploading image 4c9423b8-2ad2-4bc7-8e51-1037ab451492 {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1805.234436] env[62684]: DEBUG nova.network.neutron [-] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1805.246654] env[62684]: DEBUG oslo_vmware.rw_handles [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1805.246654] env[62684]: value = "vm-421198" [ 1805.246654] env[62684]: _type = "VirtualMachine" [ 1805.246654] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1805.246927] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-3d053e24-2da0-4d0d-8ea3-7896745cbc30 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.254231] env[62684]: DEBUG oslo_vmware.rw_handles [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Lease: (returnval){ [ 1805.254231] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529060e8-2926-08e0-2ef2-7225c4b52c67" [ 1805.254231] env[62684]: _type = "HttpNfcLease" [ 1805.254231] env[62684]: } obtained for exporting VM: (result){ [ 1805.254231] env[62684]: value = "vm-421198" [ 1805.254231] env[62684]: _type = "VirtualMachine" [ 1805.254231] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1805.254775] env[62684]: DEBUG oslo_vmware.api [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the lease: (returnval){ [ 1805.254775] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529060e8-2926-08e0-2ef2-7225c4b52c67" [ 1805.254775] env[62684]: _type = "HttpNfcLease" [ 1805.254775] env[62684]: } to be ready. 
{{(pid=62684) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1805.261110] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1805.261110] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529060e8-2926-08e0-2ef2-7225c4b52c67" [ 1805.261110] env[62684]: _type = "HttpNfcLease" [ 1805.261110] env[62684]: } is initializing. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1805.338076] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Releasing lock "refresh_cache-f44b2e88-af6d-4252-b562-9d5fa7745b56" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1805.338435] env[62684]: DEBUG nova.compute.manager [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Instance network_info: |[{"id": "04643826-a651-4eba-be4f-57825aa4f302", "address": "fa:16:3e:c9:b1:61", "network": {"id": "09056ab7-a8ec-4de0-bd66-9437b6d91612", "bridge": "br-int", "label": "tempest-ServersTestJSON-198717452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41ab694b1d944a9fb8206fa6e4461e78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04643826-a6", "ovs_interfaceid": "04643826-a651-4eba-be4f-57825aa4f302", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1805.338874] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c9:b1:61', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4020f51-6e46-4b73-a79e-9fe3fd51b917', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '04643826-a651-4eba-be4f-57825aa4f302', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1805.346441] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Creating folder: Project (41ab694b1d944a9fb8206fa6e4461e78). Parent ref: group-v421118. 
{{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1805.346720] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-402ca7ae-4537-493e-a388-78a5a60f4d5a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.358642] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Created folder: Project (41ab694b1d944a9fb8206fa6e4461e78) in parent group-v421118. [ 1805.358642] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Creating folder: Instances. Parent ref: group-v421199. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1805.358642] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d3eb27c0-6c39-40b2-ac36-a8fe9fdaa4fd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.368223] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Created folder: Instances in parent group-v421199. [ 1805.368223] env[62684]: DEBUG oslo.service.loopingcall [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1805.368436] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1805.368646] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-32af7650-d5ce-4ad0-9df7-f9c1452218d4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.383661] env[62684]: DEBUG oslo_concurrency.lockutils [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Releasing lock "refresh_cache-8c046991-b294-4f33-9fce-a241984d66d7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1805.383967] env[62684]: DEBUG nova.compute.manager [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Instance network_info: |[{"id": "b474f9ed-1ee0-4186-9dd7-336689da4726", "address": "fa:16:3e:a0:06:f4", "network": {"id": "69e30a6f-44ce-4db9-8a6c-6fe6a85d2f08", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-359372498-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53e7a6c764124be0993623bea3a56561", "mtu": 
8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "21310d90-efbc-45a8-a97f-c4358606530f", "external-id": "nsx-vlan-transportzone-672", "segmentation_id": 672, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb474f9ed-1e", "ovs_interfaceid": "b474f9ed-1ee0-4186-9dd7-336689da4726", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1805.384783] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a0:06:f4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '21310d90-efbc-45a8-a97f-c4358606530f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b474f9ed-1ee0-4186-9dd7-336689da4726', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1805.392541] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Creating folder: Project (53e7a6c764124be0993623bea3a56561). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1805.394062] env[62684]: DEBUG nova.compute.manager [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1805.395987] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-95a89d52-5c71-4acd-a294-f7dd3ed93704 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.399414] env[62684]: DEBUG nova.network.neutron [-] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1805.400539] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1805.400539] env[62684]: value = "task-2052536" [ 1805.400539] env[62684]: _type = "Task" [ 1805.400539] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.408693] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052536, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.411344] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Created folder: Project (53e7a6c764124be0993623bea3a56561) in parent group-v421118. 
[ 1805.411344] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Creating folder: Instances. Parent ref: group-v421201. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1805.411344] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-96a3203a-9eae-43fb-b591-c1800437beff {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.422602] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Created folder: Instances in parent group-v421201. [ 1805.422602] env[62684]: DEBUG oslo.service.loopingcall [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1805.422602] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1805.422764] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4691a2a9-8bdf-4ed6-b84a-0bf67614d9bd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.440504] env[62684]: DEBUG nova.virt.hardware [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1805.440751] env[62684]: DEBUG nova.virt.hardware [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1805.440909] env[62684]: DEBUG nova.virt.hardware [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1805.441111] env[62684]: DEBUG nova.virt.hardware [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 
tempest-InstanceActionsTestJSON-1623571015-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1805.441264] env[62684]: DEBUG nova.virt.hardware [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1805.441414] env[62684]: DEBUG nova.virt.hardware [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1805.441624] env[62684]: DEBUG nova.virt.hardware [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1805.441782] env[62684]: DEBUG nova.virt.hardware [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1805.441946] env[62684]: DEBUG nova.virt.hardware [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1805.442127] env[62684]: DEBUG nova.virt.hardware [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1805.442380] env[62684]: DEBUG nova.virt.hardware [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1805.443426] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db2198f-c380-4d01-b096-e85f3b793d6d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.448399] env[62684]: DEBUG nova.compute.manager [req-f529c063-6302-4703-8226-428d7601764f req-826725b5-c9c5-4576-aae1-a5ab082f988a service nova] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Received event network-changed-04643826-a651-4eba-be4f-57825aa4f302 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1805.448607] env[62684]: DEBUG nova.compute.manager [req-f529c063-6302-4703-8226-428d7601764f req-826725b5-c9c5-4576-aae1-a5ab082f988a service nova] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Refreshing instance network info cache due to event 
network-changed-04643826-a651-4eba-be4f-57825aa4f302. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1805.448843] env[62684]: DEBUG oslo_concurrency.lockutils [req-f529c063-6302-4703-8226-428d7601764f req-826725b5-c9c5-4576-aae1-a5ab082f988a service nova] Acquiring lock "refresh_cache-f44b2e88-af6d-4252-b562-9d5fa7745b56" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1805.449024] env[62684]: DEBUG oslo_concurrency.lockutils [req-f529c063-6302-4703-8226-428d7601764f req-826725b5-c9c5-4576-aae1-a5ab082f988a service nova] Acquired lock "refresh_cache-f44b2e88-af6d-4252-b562-9d5fa7745b56" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1805.449199] env[62684]: DEBUG nova.network.neutron [req-f529c063-6302-4703-8226-428d7601764f req-826725b5-c9c5-4576-aae1-a5ab082f988a service nova] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Refreshing network info cache for port 04643826-a651-4eba-be4f-57825aa4f302 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1805.452905] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1805.452905] env[62684]: value = "task-2052539" [ 1805.452905] env[62684]: _type = "Task" [ 1805.452905] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.459455] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b7c5f05-c84e-4b3e-9841-abd6f8bffa8b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.467449] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052539, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.572482] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.207s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1805.576543] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 27.657s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1805.576543] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1805.576777] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1805.576887] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.510s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1805.577144] env[62684]: DEBUG nova.objects.instance [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Lazy-loading 'resources' on Instance uuid 0f9a525c-09b9-483e-b418-fea6e6e5dc4a {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1805.578873] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f788d90b-5b00-48c9-acc2-7c13d3025678 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.588998] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c04995b1-fae1-44c2-b527-8af20832e39f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.606492] env[62684]: INFO nova.scheduler.client.report [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Deleted allocations for instance aec16a15-5d75-4ea6-800b-1bf67f762d89 [ 1805.608330] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1579f64d-faff-4f55-88e1-f5d410260c89 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.618693] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6cb931a4-0bca-45d8-8b73-cfa3b0d030ca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.650777] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180940MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1805.650953] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1805.739033] env[62684]: INFO nova.compute.manager [-] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Took 1.60 seconds to deallocate network for instance. [ 1805.762633] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1805.762633] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529060e8-2926-08e0-2ef2-7225c4b52c67" [ 1805.762633] env[62684]: _type = "HttpNfcLease" [ 1805.762633] env[62684]: } is ready. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1805.762917] env[62684]: DEBUG oslo_vmware.rw_handles [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1805.762917] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529060e8-2926-08e0-2ef2-7225c4b52c67" [ 1805.762917] env[62684]: _type = "HttpNfcLease" [ 1805.762917] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1805.763721] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d69503c6-0e32-40bd-af28-5316f12d2f6d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.773574] env[62684]: DEBUG oslo_vmware.rw_handles [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b2565-8f34-7a99-817d-e03fbd09a6c9/disk-0.vmdk from lease info. {{(pid=62684) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1805.773774] env[62684]: DEBUG oslo_vmware.rw_handles [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b2565-8f34-7a99-817d-e03fbd09a6c9/disk-0.vmdk for reading. 
{{(pid=62684) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1805.863414] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-06524eee-bbd3-45de-8362-07271667f9cb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.904622] env[62684]: INFO nova.compute.manager [-] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Took 1.77 seconds to deallocate network for instance. [ 1805.915568] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052536, 'name': CreateVM_Task, 'duration_secs': 0.365264} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.915739] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1805.916426] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1805.916593] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1805.916937] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1805.917234] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c52df6f1-24c2-4f87-b945-abf32b7190d0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.921315] env[62684]: DEBUG nova.network.neutron [req-9467f7f8-012d-41a8-8d38-11f6b3b6e2bd req-f3b29062-9120-4bd1-a707-4203167c2d7e service nova] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Updated VIF entry in instance network info cache for port 220c5589-f035-4097-8c0d-dfd565a9203a. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1805.921654] env[62684]: DEBUG nova.network.neutron [req-9467f7f8-012d-41a8-8d38-11f6b3b6e2bd req-f3b29062-9120-4bd1-a707-4203167c2d7e service nova] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Updating instance_info_cache with network_info: [{"id": "220c5589-f035-4097-8c0d-dfd565a9203a", "address": "fa:16:3e:3e:7c:34", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.235", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap220c5589-f0", "ovs_interfaceid": "220c5589-f035-4097-8c0d-dfd565a9203a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1805.926018] env[62684]: DEBUG oslo_vmware.api [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Waiting for the task: (returnval){ [ 1805.926018] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c73db7-3fc9-6d81-ad57-75c0f2bf0842" [ 1805.926018] env[62684]: _type = "Task" [ 1805.926018] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.932846] env[62684]: DEBUG oslo_vmware.api [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c73db7-3fc9-6d81-ad57-75c0f2bf0842, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.967834] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052539, 'name': CreateVM_Task, 'duration_secs': 0.435208} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.967984] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1805.968632] env[62684]: DEBUG oslo_concurrency.lockutils [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1806.119630] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0f23944-6afb-480a-8b3f-1f918b7b1dc3 tempest-ServerDiagnosticsTest-231119697 tempest-ServerDiagnosticsTest-231119697-project-member] Lock "aec16a15-5d75-4ea6-800b-1bf67f762d89" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.449s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1806.125188] env[62684]: DEBUG nova.network.neutron [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Successfully updated port: 140f3118-d3fd-4be3-a661-89c3cac2cb26 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1806.244497] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1806.354548] env[62684]: DEBUG nova.network.neutron [req-f529c063-6302-4703-8226-428d7601764f req-826725b5-c9c5-4576-aae1-a5ab082f988a service nova] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Updated VIF entry in instance network info cache for port 04643826-a651-4eba-be4f-57825aa4f302. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1806.354969] env[62684]: DEBUG nova.network.neutron [req-f529c063-6302-4703-8226-428d7601764f req-826725b5-c9c5-4576-aae1-a5ab082f988a service nova] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Updating instance_info_cache with network_info: [{"id": "04643826-a651-4eba-be4f-57825aa4f302", "address": "fa:16:3e:c9:b1:61", "network": {"id": "09056ab7-a8ec-4de0-bd66-9437b6d91612", "bridge": "br-int", "label": "tempest-ServersTestJSON-198717452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41ab694b1d944a9fb8206fa6e4461e78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04643826-a6", "ovs_interfaceid": "04643826-a651-4eba-be4f-57825aa4f302", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1806.412298] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5304a8c6-4bc7-4303-9b48-aa2fdb45bd63 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1806.424290] env[62684]: DEBUG oslo_concurrency.lockutils [req-9467f7f8-012d-41a8-8d38-11f6b3b6e2bd req-f3b29062-9120-4bd1-a707-4203167c2d7e service nova] Releasing lock "refresh_cache-4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1806.441174] env[62684]: DEBUG oslo_vmware.api [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c73db7-3fc9-6d81-ad57-75c0f2bf0842, 'name': SearchDatastore_Task, 'duration_secs': 0.012057} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.441174] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1806.441693] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1806.442469] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1806.442871] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1806.443222] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1806.446584] env[62684]: DEBUG oslo_concurrency.lockutils [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1806.447200] env[62684]: DEBUG oslo_concurrency.lockutils [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1806.447617] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-71e047b3-3c34-42df-b48c-21d744f8b3b1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.449786] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-572d9bb9-641e-401d-9a09-a67769084c9e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.455918] env[62684]: DEBUG oslo_vmware.api [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 
tempest-ServerGroupTestJSON-974619422-project-member] Waiting for the task: (returnval){ [ 1806.455918] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523975f1-3482-c5b6-f776-c84198a5dac0" [ 1806.455918] env[62684]: _type = "Task" [ 1806.455918] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.460617] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1806.461619] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1806.465486] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf7fc30c-504b-43b1-8a09-19e8d876dd44 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.474473] env[62684]: DEBUG oslo_vmware.api [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523975f1-3482-c5b6-f776-c84198a5dac0, 'name': SearchDatastore_Task, 'duration_secs': 0.009435} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.475223] env[62684]: DEBUG oslo_concurrency.lockutils [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1806.475711] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1806.476126] env[62684]: DEBUG oslo_concurrency.lockutils [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1806.477541] env[62684]: DEBUG oslo_vmware.api [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Waiting for the task: (returnval){ [ 1806.477541] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5260e7d5-f69d-5e54-80f3-d90cae338310" [ 1806.477541] env[62684]: _type = "Task" [ 1806.477541] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.488663] env[62684]: DEBUG oslo_vmware.api [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5260e7d5-f69d-5e54-80f3-d90cae338310, 'name': SearchDatastore_Task, 'duration_secs': 0.008588} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.492019] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c752c43-3fd3-452f-baef-5f239a69430f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.495719] env[62684]: DEBUG oslo_vmware.api [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Waiting for the task: (returnval){ [ 1806.495719] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5289418b-14fb-7dad-1e6c-edb87172b5c0" [ 1806.495719] env[62684]: _type = "Task" [ 1806.495719] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.508509] env[62684]: DEBUG oslo_vmware.api [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5289418b-14fb-7dad-1e6c-edb87172b5c0, 'name': SearchDatastore_Task, 'duration_secs': 0.008344} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.509077] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1806.509687] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] f44b2e88-af6d-4252-b562-9d5fa7745b56/f44b2e88-af6d-4252-b562-9d5fa7745b56.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1806.512759] env[62684]: DEBUG oslo_concurrency.lockutils [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1806.513101] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 
1806.513508] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8f1bf68a-21ce-4a48-8d9b-172d065fa65e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.519438] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c97fbd8a-933c-44b4-aea2-2b3fe7950f7d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.526022] env[62684]: DEBUG oslo_vmware.api [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Waiting for the task: (returnval){ [ 1806.526022] env[62684]: value = "task-2052540" [ 1806.526022] env[62684]: _type = "Task" [ 1806.526022] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.530540] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1806.530773] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1806.534385] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8ad3b31-25f9-4780-9929-1918494546cc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.540957] env[62684]: DEBUG oslo_vmware.api [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Task: {'id': task-2052540, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.543873] env[62684]: DEBUG oslo_vmware.api [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Waiting for the task: (returnval){ [ 1806.543873] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524ffe14-541b-abfd-620e-93731261297c" [ 1806.543873] env[62684]: _type = "Task" [ 1806.543873] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.556772] env[62684]: DEBUG oslo_vmware.api [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524ffe14-541b-abfd-620e-93731261297c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.633216] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Acquiring lock "refresh_cache-dfe40a8c-61d6-4c60-afd3-0defb61c4308" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1806.633379] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Acquired lock "refresh_cache-dfe40a8c-61d6-4c60-afd3-0defb61c4308" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1806.633534] env[62684]: DEBUG nova.network.neutron [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1806.674661] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d1acd6b-a644-4991-bf1f-2980a08232ba {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.682791] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a89c5762-7075-418b-842a-23fcacf6df02 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.716985] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95f81a17-8fe8-4299-8bfd-87a16527d9f5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.725912] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adb92b83-f0e0-437d-b3e7-d34d73194ed5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.744558] env[62684]: DEBUG nova.compute.provider_tree [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1806.857852] env[62684]: DEBUG oslo_concurrency.lockutils [req-f529c063-6302-4703-8226-428d7601764f req-826725b5-c9c5-4576-aae1-a5ab082f988a service nova] Releasing lock "refresh_cache-f44b2e88-af6d-4252-b562-9d5fa7745b56" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1806.858497] env[62684]: DEBUG nova.compute.manager [req-f529c063-6302-4703-8226-428d7601764f req-826725b5-c9c5-4576-aae1-a5ab082f988a service nova] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Received event network-vif-deleted-f30c0c93-502e-48a6-b8f1-c44350487322 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1807.037706] env[62684]: DEBUG oslo_vmware.api [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 
tempest-ServersTestJSON-1246500318-project-member] Task: {'id': task-2052540, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.054123] env[62684]: DEBUG oslo_vmware.api [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524ffe14-541b-abfd-620e-93731261297c, 'name': SearchDatastore_Task, 'duration_secs': 0.008886} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.055094] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a5fe604-9591-496a-b143-6658d7e26ff8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.060647] env[62684]: DEBUG oslo_vmware.api [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Waiting for the task: (returnval){ [ 1807.060647] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5296e83a-4a51-695d-e647-a200379c0762" [ 1807.060647] env[62684]: _type = "Task" [ 1807.060647] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.069230] env[62684]: DEBUG oslo_vmware.api [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5296e83a-4a51-695d-e647-a200379c0762, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.130691] env[62684]: DEBUG nova.compute.manager [req-8af522d8-44fd-4cc5-9fc8-d0c7480861c4 req-8a3ab88d-6d22-46f9-af50-622677512b6b service nova] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Received event network-vif-deleted-6b84c070-8ca0-4da2-9936-7e97377d47ed {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1807.184790] env[62684]: DEBUG nova.network.neutron [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1807.249207] env[62684]: DEBUG nova.scheduler.client.report [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1807.414762] env[62684]: DEBUG nova.network.neutron [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Updating instance_info_cache with network_info: [{"id": "140f3118-d3fd-4be3-a661-89c3cac2cb26", "address": "fa:16:3e:eb:04:23", "network": {"id": "afa57c36-637b-4edc-96e8-5e5ff2185258", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-270820560-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5513297908414207afa484e075bd26e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "26a1e556-1ede-445a-bf87-a35daa5d9070", "external-id": "nsx-vlan-transportzone-396", "segmentation_id": 396, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap140f3118-d3", "ovs_interfaceid": "140f3118-d3fd-4be3-a661-89c3cac2cb26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1807.542760] env[62684]: DEBUG oslo_vmware.api [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Task: {'id': task-2052540, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.552105} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.543425] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] f44b2e88-af6d-4252-b562-9d5fa7745b56/f44b2e88-af6d-4252-b562-9d5fa7745b56.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1807.543834] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1807.544252] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8df93fea-8646-4f40-9689-3c11281af419 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.553588] env[62684]: DEBUG oslo_vmware.api [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Waiting for the task: (returnval){ [ 1807.553588] env[62684]: value = "task-2052541" [ 1807.553588] env[62684]: _type = "Task" [ 1807.553588] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.571300] env[62684]: DEBUG oslo_vmware.api [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Task: {'id': task-2052541, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.579189] env[62684]: DEBUG oslo_vmware.api [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5296e83a-4a51-695d-e647-a200379c0762, 'name': SearchDatastore_Task, 'duration_secs': 0.010768} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.580513] env[62684]: DEBUG oslo_concurrency.lockutils [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1807.580821] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 8c046991-b294-4f33-9fce-a241984d66d7/8c046991-b294-4f33-9fce-a241984d66d7.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1807.582132] env[62684]: DEBUG nova.compute.manager [req-4524cc20-a667-4505-9ce5-e514bbca7bec req-6bd09322-e958-4489-b483-0a65da787d7b service nova] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Received event network-vif-plugged-140f3118-d3fd-4be3-a661-89c3cac2cb26 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1807.582293] env[62684]: DEBUG oslo_concurrency.lockutils [req-4524cc20-a667-4505-9ce5-e514bbca7bec req-6bd09322-e958-4489-b483-0a65da787d7b service nova] Acquiring lock "dfe40a8c-61d6-4c60-afd3-0defb61c4308-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1807.582533] env[62684]: DEBUG oslo_concurrency.lockutils [req-4524cc20-a667-4505-9ce5-e514bbca7bec req-6bd09322-e958-4489-b483-0a65da787d7b service nova] Lock "dfe40a8c-61d6-4c60-afd3-0defb61c4308-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1807.582728] env[62684]: DEBUG oslo_concurrency.lockutils [req-4524cc20-a667-4505-9ce5-e514bbca7bec req-6bd09322-e958-4489-b483-0a65da787d7b service nova] Lock "dfe40a8c-61d6-4c60-afd3-0defb61c4308-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1807.582936] env[62684]: DEBUG nova.compute.manager [req-4524cc20-a667-4505-9ce5-e514bbca7bec req-6bd09322-e958-4489-b483-0a65da787d7b service nova] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] No waiting events found dispatching network-vif-plugged-140f3118-d3fd-4be3-a661-89c3cac2cb26 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1807.583168] env[62684]: WARNING nova.compute.manager [req-4524cc20-a667-4505-9ce5-e514bbca7bec req-6bd09322-e958-4489-b483-0a65da787d7b service nova] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Received unexpected event network-vif-plugged-140f3118-d3fd-4be3-a661-89c3cac2cb26 for instance with vm_state building and task_state spawning. 
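The "No waiting events found dispatching network-vif-plugged-..." / "Received unexpected event ..." pair just above traces Nova's external instance-event dispatch: Neutron pushes the event, the compute manager pops any waiter registered for that (instance, event) pair under the per-instance "-events" lock, finds none because the instance is still building, and logs the warning. The following is only a minimal sketch of that pattern, assuming hypothetical names (the class and functions mimic identifiers seen in the log but are not Nova's implementation).

import logging
import threading

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger("event_dispatch_sketch")


class InstanceEvents:
    """Registry of events that in-flight operations are waiting on (sketch)."""

    def __init__(self):
        self._lock = threading.Lock()   # cf. the "<uuid>-events" lock in the log
        self._waiters = {}              # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(self, instance_uuid, event_name):
        """Register interest before triggering the action (e.g. plugging a VIF)."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def pop_instance_event(self, instance_uuid, event_name):
        """Remove and return the waiter for an incoming event, or None."""
        with self._lock:
            return self._waiters.pop((instance_uuid, event_name), None)


def external_instance_event(events, instance_uuid, event_name):
    """Handle one event pushed by Neutron via the external events API (sketch)."""
    waiter = events.pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        # Matches the WARNING above: the event arrived before anything
        # registered a waiter for it (instance still building/spawning).
        LOG.warning("Received unexpected event %s for instance %s",
                    event_name, instance_uuid)
    else:
        LOG.debug("Dispatching %s", event_name)
        waiter.set()


if __name__ == "__main__":
    events = InstanceEvents()
    # Nothing registered yet, so this reproduces the "unexpected event" case above.
    external_instance_event(events,
                            "dfe40a8c-61d6-4c60-afd3-0defb61c4308",
                            "network-vif-plugged-140f3118-d3fd-4be3-a661-89c3cac2cb26")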
[ 1807.583390] env[62684]: DEBUG nova.compute.manager [req-4524cc20-a667-4505-9ce5-e514bbca7bec req-6bd09322-e958-4489-b483-0a65da787d7b service nova] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Received event network-changed-140f3118-d3fd-4be3-a661-89c3cac2cb26 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1807.583618] env[62684]: DEBUG nova.compute.manager [req-4524cc20-a667-4505-9ce5-e514bbca7bec req-6bd09322-e958-4489-b483-0a65da787d7b service nova] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Refreshing instance network info cache due to event network-changed-140f3118-d3fd-4be3-a661-89c3cac2cb26. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1807.583926] env[62684]: DEBUG oslo_concurrency.lockutils [req-4524cc20-a667-4505-9ce5-e514bbca7bec req-6bd09322-e958-4489-b483-0a65da787d7b service nova] Acquiring lock "refresh_cache-dfe40a8c-61d6-4c60-afd3-0defb61c4308" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1807.584197] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a1aaabce-31c1-4f05-a65f-670c18654466 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.591700] env[62684]: DEBUG oslo_vmware.api [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Waiting for the task: (returnval){ [ 1807.591700] env[62684]: value = "task-2052542" [ 1807.591700] env[62684]: _type = "Task" [ 1807.591700] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.600670] env[62684]: DEBUG oslo_vmware.api [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Task: {'id': task-2052542, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.756822] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.178s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1807.758448] env[62684]: DEBUG oslo_concurrency.lockutils [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.905s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1807.760247] env[62684]: INFO nova.compute.claims [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1807.789413] env[62684]: INFO nova.scheduler.client.report [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Deleted allocations for instance 0f9a525c-09b9-483e-b418-fea6e6e5dc4a [ 1807.919069] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Releasing lock "refresh_cache-dfe40a8c-61d6-4c60-afd3-0defb61c4308" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1807.919482] env[62684]: DEBUG nova.compute.manager [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Instance network_info: |[{"id": "140f3118-d3fd-4be3-a661-89c3cac2cb26", "address": "fa:16:3e:eb:04:23", "network": {"id": "afa57c36-637b-4edc-96e8-5e5ff2185258", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-270820560-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5513297908414207afa484e075bd26e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "26a1e556-1ede-445a-bf87-a35daa5d9070", "external-id": "nsx-vlan-transportzone-396", "segmentation_id": 396, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap140f3118-d3", "ovs_interfaceid": "140f3118-d3fd-4be3-a661-89c3cac2cb26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1807.920214] env[62684]: DEBUG 
oslo_concurrency.lockutils [req-4524cc20-a667-4505-9ce5-e514bbca7bec req-6bd09322-e958-4489-b483-0a65da787d7b service nova] Acquired lock "refresh_cache-dfe40a8c-61d6-4c60-afd3-0defb61c4308" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1807.920413] env[62684]: DEBUG nova.network.neutron [req-4524cc20-a667-4505-9ce5-e514bbca7bec req-6bd09322-e958-4489-b483-0a65da787d7b service nova] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Refreshing network info cache for port 140f3118-d3fd-4be3-a661-89c3cac2cb26 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1807.921715] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:04:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '26a1e556-1ede-445a-bf87-a35daa5d9070', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '140f3118-d3fd-4be3-a661-89c3cac2cb26', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1807.932331] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Creating folder: Project (5513297908414207afa484e075bd26e8). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1807.933459] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ef8c1276-1d9d-4b41-9d0b-44b0c3fe4891 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.947652] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Created folder: Project (5513297908414207afa484e075bd26e8) in parent group-v421118. [ 1807.947895] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Creating folder: Instances. Parent ref: group-v421205. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1807.948493] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b7ced688-547c-4fa8-8707-59da47e2ed6e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.958779] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Created folder: Instances in parent group-v421205. [ 1807.959086] env[62684]: DEBUG oslo.service.loopingcall [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1807.959347] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1807.959582] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ba4e4d58-819a-4e48-8417-50e5e719de4d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.982706] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1807.982706] env[62684]: value = "task-2052545" [ 1807.982706] env[62684]: _type = "Task" [ 1807.982706] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.992737] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052545, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.065771] env[62684]: DEBUG oslo_vmware.api [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Task: {'id': task-2052541, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069626} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.066241] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1808.067112] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae53a4c9-bb17-4eb5-ab14-a27c5b2a7506 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.091546] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Reconfiguring VM instance instance-0000001b to attach disk [datastore2] f44b2e88-af6d-4252-b562-9d5fa7745b56/f44b2e88-af6d-4252-b562-9d5fa7745b56.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1808.091957] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b279d999-30c2-468b-92e5-05be0d39106e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.115262] env[62684]: DEBUG oslo_vmware.api [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Task: {'id': task-2052542, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.485393} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.116642] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 8c046991-b294-4f33-9fce-a241984d66d7/8c046991-b294-4f33-9fce-a241984d66d7.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1808.116908] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1808.117298] env[62684]: DEBUG oslo_vmware.api [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Waiting for the task: (returnval){ [ 1808.117298] env[62684]: value = "task-2052546" [ 1808.117298] env[62684]: _type = "Task" [ 1808.117298] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.117519] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8c6868b2-575f-4f3b-8852-8ad79966708f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.128572] env[62684]: DEBUG oslo_vmware.api [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Task: {'id': task-2052546, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.129919] env[62684]: DEBUG oslo_vmware.api [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Waiting for the task: (returnval){ [ 1808.129919] env[62684]: value = "task-2052547" [ 1808.129919] env[62684]: _type = "Task" [ 1808.129919] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.138713] env[62684]: DEBUG oslo_vmware.api [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Task: {'id': task-2052547, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.298438] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c11d13a4-9562-4dbc-94ba-facca48d849b tempest-ImagesOneServerTestJSON-655524431 tempest-ImagesOneServerTestJSON-655524431-project-member] Lock "0f9a525c-09b9-483e-b418-fea6e6e5dc4a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.429s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.494149] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052545, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.632445] env[62684]: DEBUG oslo_vmware.api [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Task: {'id': task-2052546, 'name': ReconfigVM_Task, 'duration_secs': 0.35453} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.636371] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Reconfigured VM instance instance-0000001b to attach disk [datastore2] f44b2e88-af6d-4252-b562-9d5fa7745b56/f44b2e88-af6d-4252-b562-9d5fa7745b56.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1808.637147] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cd357f53-1f35-4e08-8632-7fb304848928 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.645049] env[62684]: DEBUG oslo_vmware.api [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Task: {'id': task-2052547, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082829} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.646300] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1808.646646] env[62684]: DEBUG oslo_vmware.api [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Waiting for the task: (returnval){ [ 1808.646646] env[62684]: value = "task-2052548" [ 1808.646646] env[62684]: _type = "Task" [ 1808.646646] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.647373] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8786d29-c379-4637-ae18-65371f851fb5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.658645] env[62684]: DEBUG oslo_vmware.api [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Task: {'id': task-2052548, 'name': Rename_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.677780] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Reconfiguring VM instance instance-0000001a to attach disk [datastore2] 8c046991-b294-4f33-9fce-a241984d66d7/8c046991-b294-4f33-9fce-a241984d66d7.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1808.678127] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52e1da33-8a7f-45fa-9825-1d7e5a6bd49d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.700928] env[62684]: DEBUG oslo_vmware.api [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Waiting for the task: (returnval){ [ 1808.700928] env[62684]: value = "task-2052549" [ 1808.700928] env[62684]: _type = "Task" [ 1808.700928] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.710695] env[62684]: DEBUG oslo_vmware.api [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Task: {'id': task-2052549, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.996218] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052545, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.162140] env[62684]: DEBUG oslo_vmware.api [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Task: {'id': task-2052548, 'name': Rename_Task, 'duration_secs': 0.178975} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.165552] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1809.166388] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9a41beb0-5bf7-4d40-9164-78c208246a15 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.174251] env[62684]: DEBUG oslo_vmware.api [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Waiting for the task: (returnval){ [ 1809.174251] env[62684]: value = "task-2052550" [ 1809.174251] env[62684]: _type = "Task" [ 1809.174251] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.185417] env[62684]: DEBUG oslo_vmware.api [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Task: {'id': task-2052550, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.214171] env[62684]: DEBUG oslo_vmware.api [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Task: {'id': task-2052549, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.294546] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c371bfa1-2e8a-430f-8084-e23b309aa751 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.302352] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-107880b9-a154-4351-9d2a-6048aa45f8ca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.336552] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a81e95c1-0bcc-40f4-ab08-45375cb668db {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.347663] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb62ed8a-1f61-4718-84f2-fe284b944921 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.363261] env[62684]: DEBUG nova.compute.provider_tree [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1809.370420] env[62684]: DEBUG nova.network.neutron [req-4524cc20-a667-4505-9ce5-e514bbca7bec req-6bd09322-e958-4489-b483-0a65da787d7b service nova] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Updated VIF entry in instance network info cache for port 140f3118-d3fd-4be3-a661-89c3cac2cb26. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1809.370532] env[62684]: DEBUG nova.network.neutron [req-4524cc20-a667-4505-9ce5-e514bbca7bec req-6bd09322-e958-4489-b483-0a65da787d7b service nova] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Updating instance_info_cache with network_info: [{"id": "140f3118-d3fd-4be3-a661-89c3cac2cb26", "address": "fa:16:3e:eb:04:23", "network": {"id": "afa57c36-637b-4edc-96e8-5e5ff2185258", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-270820560-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5513297908414207afa484e075bd26e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "26a1e556-1ede-445a-bf87-a35daa5d9070", "external-id": "nsx-vlan-transportzone-396", "segmentation_id": 396, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap140f3118-d3", "ovs_interfaceid": "140f3118-d3fd-4be3-a661-89c3cac2cb26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1809.506807] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052545, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.692922] env[62684]: DEBUG oslo_vmware.api [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Task: {'id': task-2052550, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.714929] env[62684]: DEBUG oslo_vmware.api [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Task: {'id': task-2052549, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.867640] env[62684]: DEBUG nova.scheduler.client.report [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1809.873186] env[62684]: DEBUG oslo_concurrency.lockutils [req-4524cc20-a667-4505-9ce5-e514bbca7bec req-6bd09322-e958-4489-b483-0a65da787d7b service nova] Releasing lock "refresh_cache-dfe40a8c-61d6-4c60-afd3-0defb61c4308" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1809.998614] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052545, 'name': CreateVM_Task, 'duration_secs': 1.67169} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.998960] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1809.999582] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1809.999758] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1810.000103] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1810.000367] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90fda8b0-4f71-420b-b95b-cbc402219fa0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.005192] env[62684]: DEBUG oslo_vmware.api [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Waiting for the task: (returnval){ [ 1810.005192] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c62440-4d55-4953-672b-05f49fc4fbd4" [ 1810.005192] 
env[62684]: _type = "Task" [ 1810.005192] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.013769] env[62684]: DEBUG oslo_vmware.api [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c62440-4d55-4953-672b-05f49fc4fbd4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.186578] env[62684]: DEBUG oslo_vmware.api [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Task: {'id': task-2052550, 'name': PowerOnVM_Task, 'duration_secs': 0.559685} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.186870] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1810.187101] env[62684]: INFO nova.compute.manager [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Took 7.56 seconds to spawn the instance on the hypervisor. [ 1810.187295] env[62684]: DEBUG nova.compute.manager [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1810.188096] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a6aa46e-a2a2-4036-80d6-5cdc0b9a9f61 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.222506] env[62684]: DEBUG oslo_vmware.api [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Task: {'id': task-2052549, 'name': ReconfigVM_Task, 'duration_secs': 1.456582} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.222854] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Reconfigured VM instance instance-0000001a to attach disk [datastore2] 8c046991-b294-4f33-9fce-a241984d66d7/8c046991-b294-4f33-9fce-a241984d66d7.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1810.223575] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6fb5cbf6-0ba0-4826-992e-bb7081c97d56 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.235214] env[62684]: DEBUG oslo_vmware.api [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Waiting for the task: (returnval){ [ 1810.235214] env[62684]: value = "task-2052551" [ 1810.235214] env[62684]: _type = "Task" [ 1810.235214] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.242403] env[62684]: DEBUG oslo_vmware.api [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Task: {'id': task-2052551, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.375980] env[62684]: DEBUG oslo_concurrency.lockutils [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.618s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1810.376676] env[62684]: DEBUG nova.compute.manager [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1810.379549] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.946s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1810.381103] env[62684]: INFO nova.compute.claims [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1810.520478] env[62684]: DEBUG oslo_vmware.api [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c62440-4d55-4953-672b-05f49fc4fbd4, 'name': SearchDatastore_Task, 'duration_secs': 0.010491} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.520811] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1810.521072] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1810.521321] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1810.521473] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1810.521656] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1810.521937] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4c254cec-199a-40b7-b62e-781fad33d893 {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.531379] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1810.531575] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1810.532392] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3bddf56-1e5b-48f8-a2ea-b49bd98f7425 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.538260] env[62684]: DEBUG oslo_vmware.api [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Waiting for the task: (returnval){ [ 1810.538260] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522b73ce-a6a9-ca72-35e3-331ce8cd44b8" [ 1810.538260] env[62684]: _type = "Task" [ 1810.538260] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.547365] env[62684]: DEBUG oslo_vmware.api [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522b73ce-a6a9-ca72-35e3-331ce8cd44b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.713802] env[62684]: INFO nova.compute.manager [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Took 37.73 seconds to build instance. [ 1810.746214] env[62684]: DEBUG oslo_vmware.api [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Task: {'id': task-2052551, 'name': Rename_Task, 'duration_secs': 0.175453} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.746214] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1810.746214] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-31256777-e528-41c2-889a-7b53f87b5713 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.756600] env[62684]: DEBUG oslo_vmware.api [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Waiting for the task: (returnval){ [ 1810.756600] env[62684]: value = "task-2052552" [ 1810.756600] env[62684]: _type = "Task" [ 1810.756600] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.764820] env[62684]: DEBUG oslo_vmware.api [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Task: {'id': task-2052552, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.888027] env[62684]: DEBUG nova.compute.utils [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1810.896023] env[62684]: DEBUG nova.compute.manager [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1810.896023] env[62684]: DEBUG nova.network.neutron [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1811.023319] env[62684]: DEBUG nova.policy [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3796eb780d684cdcad4acc92ae15fa6e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e169ffb4120143dca6d67108986e62f6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1811.051401] env[62684]: DEBUG oslo_vmware.api [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522b73ce-a6a9-ca72-35e3-331ce8cd44b8, 'name': SearchDatastore_Task, 'duration_secs': 0.008546} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.051921] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-373a9c2b-b59e-4375-bbcb-80b9f33c7181 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.059499] env[62684]: DEBUG oslo_vmware.api [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Waiting for the task: (returnval){ [ 1811.059499] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5284fa5d-c261-65df-25dc-5113269377fa" [ 1811.059499] env[62684]: _type = "Task" [ 1811.059499] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.068741] env[62684]: DEBUG oslo_vmware.api [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5284fa5d-c261-65df-25dc-5113269377fa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.217467] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e8bd4e6c-107c-4401-8901-ee7472beb50b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Lock "f44b2e88-af6d-4252-b562-9d5fa7745b56" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.521s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1811.269580] env[62684]: DEBUG oslo_vmware.api [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Task: {'id': task-2052552, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.393352] env[62684]: DEBUG nova.compute.manager [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1811.575048] env[62684]: DEBUG oslo_vmware.api [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5284fa5d-c261-65df-25dc-5113269377fa, 'name': SearchDatastore_Task, 'duration_secs': 0.010592} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.575385] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1811.575658] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] dfe40a8c-61d6-4c60-afd3-0defb61c4308/dfe40a8c-61d6-4c60-afd3-0defb61c4308.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1811.575927] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-256fb620-b1a1-4d04-bc22-c1ea3907c5e7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.585534] env[62684]: DEBUG oslo_vmware.api [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Waiting for the task: (returnval){ [ 1811.585534] env[62684]: value = "task-2052553" [ 1811.585534] env[62684]: _type = "Task" [ 1811.585534] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.595388] env[62684]: DEBUG oslo_vmware.api [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Task: {'id': task-2052553, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.722410] env[62684]: DEBUG nova.compute.manager [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1811.772211] env[62684]: DEBUG oslo_vmware.api [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Task: {'id': task-2052552, 'name': PowerOnVM_Task, 'duration_secs': 0.765176} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.772211] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1811.772211] env[62684]: INFO nova.compute.manager [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Took 10.81 seconds to spawn the instance on the hypervisor. [ 1811.772211] env[62684]: DEBUG nova.compute.manager [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1811.773342] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7202c169-9e57-49fd-b71e-47e7383c6b08 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.916394] env[62684]: DEBUG nova.compute.manager [req-090a3494-9d20-4a5a-9c75-afb57043a1fa req-9c3cc053-5215-4784-8943-a8b21320783d service nova] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Received event network-changed-04643826-a651-4eba-be4f-57825aa4f302 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1811.916664] env[62684]: DEBUG nova.compute.manager [req-090a3494-9d20-4a5a-9c75-afb57043a1fa req-9c3cc053-5215-4784-8943-a8b21320783d service nova] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Refreshing instance network info cache due to event network-changed-04643826-a651-4eba-be4f-57825aa4f302. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1811.916922] env[62684]: DEBUG oslo_concurrency.lockutils [req-090a3494-9d20-4a5a-9c75-afb57043a1fa req-9c3cc053-5215-4784-8943-a8b21320783d service nova] Acquiring lock "refresh_cache-f44b2e88-af6d-4252-b562-9d5fa7745b56" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1811.917114] env[62684]: DEBUG oslo_concurrency.lockutils [req-090a3494-9d20-4a5a-9c75-afb57043a1fa req-9c3cc053-5215-4784-8943-a8b21320783d service nova] Acquired lock "refresh_cache-f44b2e88-af6d-4252-b562-9d5fa7745b56" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1811.917285] env[62684]: DEBUG nova.network.neutron [req-090a3494-9d20-4a5a-9c75-afb57043a1fa req-9c3cc053-5215-4784-8943-a8b21320783d service nova] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Refreshing network info cache for port 04643826-a651-4eba-be4f-57825aa4f302 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1811.953513] env[62684]: DEBUG nova.network.neutron [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Successfully created port: 1ce065ea-4c6e-4c34-8b7f-27a3b14d0924 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1812.104705] env[62684]: DEBUG oslo_vmware.api [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Task: {'id': task-2052553, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.464818} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.107743] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] dfe40a8c-61d6-4c60-afd3-0defb61c4308/dfe40a8c-61d6-4c60-afd3-0defb61c4308.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1812.107989] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1812.108512] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-39c64f48-3c71-4098-aa1f-03b0a522066d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.116796] env[62684]: DEBUG oslo_vmware.api [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Waiting for the task: (returnval){ [ 1812.116796] env[62684]: value = "task-2052554" [ 1812.116796] env[62684]: _type = "Task" [ 1812.116796] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.129137] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cc59d7c-da3b-4bb8-8b28-1fae1d2aac64 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.134907] env[62684]: DEBUG oslo_vmware.api [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Task: {'id': task-2052554, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.146532] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-201ae2ae-1480-4f57-aaa2-28881ad95710 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.184833] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe3be6d3-0a10-46b1-85ca-8195a12348e5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.193662] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d24c3d3-09cf-4bce-86c3-4509dc6ac6e0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.209025] env[62684]: DEBUG nova.compute.provider_tree [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1812.248156] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1812.301042] env[62684]: INFO nova.compute.manager [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Took 41.29 seconds to build instance. [ 1812.407936] env[62684]: DEBUG nova.compute.manager [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1812.434161] env[62684]: DEBUG nova.virt.hardware [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1812.434560] env[62684]: DEBUG nova.virt.hardware [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1812.434825] env[62684]: DEBUG nova.virt.hardware [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1812.435062] env[62684]: DEBUG nova.virt.hardware [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1812.435256] env[62684]: DEBUG nova.virt.hardware [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1812.435425] env[62684]: DEBUG nova.virt.hardware [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1812.435715] env[62684]: DEBUG nova.virt.hardware [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1812.435826] env[62684]: DEBUG nova.virt.hardware [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1812.436009] env[62684]: DEBUG nova.virt.hardware [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1812.436195] env[62684]: DEBUG nova.virt.hardware [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1812.436380] env[62684]: DEBUG nova.virt.hardware [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1812.437684] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ae162f-3714-43a6-988f-fdfe5a2c550c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.451246] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5eb1748-c8bb-42a9-b411-e7504e3fdf7e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.630020] env[62684]: DEBUG oslo_vmware.api [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Task: {'id': task-2052554, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084897} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.630020] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1812.630020] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89aa0ac5-dfb2-424b-bf7c-596b4a856027 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.650659] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Reconfiguring VM instance instance-0000001c to attach disk [datastore2] dfe40a8c-61d6-4c60-afd3-0defb61c4308/dfe40a8c-61d6-4c60-afd3-0defb61c4308.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1812.653299] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f1a3d4b-4f6b-4784-839f-2bd51c2abf88 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.673329] env[62684]: DEBUG oslo_vmware.api [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Waiting for the task: (returnval){ [ 1812.673329] env[62684]: value = "task-2052555" [ 1812.673329] env[62684]: _type = "Task" [ 1812.673329] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.682203] env[62684]: DEBUG oslo_vmware.api [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Task: {'id': task-2052555, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.707886] env[62684]: DEBUG nova.network.neutron [req-090a3494-9d20-4a5a-9c75-afb57043a1fa req-9c3cc053-5215-4784-8943-a8b21320783d service nova] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Updated VIF entry in instance network info cache for port 04643826-a651-4eba-be4f-57825aa4f302. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1812.708285] env[62684]: DEBUG nova.network.neutron [req-090a3494-9d20-4a5a-9c75-afb57043a1fa req-9c3cc053-5215-4784-8943-a8b21320783d service nova] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Updating instance_info_cache with network_info: [{"id": "04643826-a651-4eba-be4f-57825aa4f302", "address": "fa:16:3e:c9:b1:61", "network": {"id": "09056ab7-a8ec-4de0-bd66-9437b6d91612", "bridge": "br-int", "label": "tempest-ServersTestJSON-198717452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41ab694b1d944a9fb8206fa6e4461e78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04643826-a6", "ovs_interfaceid": "04643826-a651-4eba-be4f-57825aa4f302", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1812.710731] env[62684]: DEBUG nova.scheduler.client.report [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1812.803130] env[62684]: DEBUG oslo_concurrency.lockutils [None req-70350a80-bdc7-45b5-8bbc-7e37a6053a41 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Lock "8c046991-b294-4f33-9fce-a241984d66d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.472s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1813.186015] env[62684]: DEBUG oslo_vmware.api [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Task: {'id': task-2052555, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.211089] env[62684]: DEBUG oslo_concurrency.lockutils [req-090a3494-9d20-4a5a-9c75-afb57043a1fa req-9c3cc053-5215-4784-8943-a8b21320783d service nova] Releasing lock "refresh_cache-f44b2e88-af6d-4252-b562-9d5fa7745b56" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1813.218426] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.836s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1813.218426] env[62684]: DEBUG nova.compute.manager [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1813.219484] env[62684]: DEBUG oslo_concurrency.lockutils [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.508s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1813.219716] env[62684]: DEBUG nova.objects.instance [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Lazy-loading 'resources' on Instance uuid a4767855-0c1d-48c8-98cc-6532ff140b5c {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1813.305296] env[62684]: DEBUG nova.compute.manager [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1813.693230] env[62684]: DEBUG oslo_vmware.api [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Task: {'id': task-2052555, 'name': ReconfigVM_Task, 'duration_secs': 0.69339} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.693619] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Reconfigured VM instance instance-0000001c to attach disk [datastore2] dfe40a8c-61d6-4c60-afd3-0defb61c4308/dfe40a8c-61d6-4c60-afd3-0defb61c4308.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1813.694313] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7d8bd460-1bf6-42e4-8f8c-a892d768e318 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.701439] env[62684]: DEBUG oslo_vmware.api [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Waiting for the task: (returnval){ [ 1813.701439] env[62684]: value = "task-2052556" [ 1813.701439] env[62684]: _type = "Task" [ 1813.701439] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.713442] env[62684]: DEBUG oslo_vmware.api [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Task: {'id': task-2052556, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.727343] env[62684]: DEBUG nova.compute.utils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1813.733109] env[62684]: DEBUG nova.compute.manager [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1813.733389] env[62684]: DEBUG nova.network.neutron [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1813.779610] env[62684]: DEBUG nova.policy [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5892ca114a9d4a6e95c0498c2fc7f2ba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61c83953d09c4d1c97eee5a8679c30d4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1813.803668] env[62684]: DEBUG oslo_concurrency.lockutils [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Acquiring lock "8c046991-b294-4f33-9fce-a241984d66d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.803935] env[62684]: DEBUG oslo_concurrency.lockutils [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Lock "8c046991-b294-4f33-9fce-a241984d66d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1813.805025] env[62684]: DEBUG oslo_concurrency.lockutils [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Acquiring lock "8c046991-b294-4f33-9fce-a241984d66d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.805025] env[62684]: DEBUG oslo_concurrency.lockutils [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Lock "8c046991-b294-4f33-9fce-a241984d66d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1813.805025] env[62684]: DEBUG oslo_concurrency.lockutils [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Lock "8c046991-b294-4f33-9fce-a241984d66d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1813.812826] env[62684]: INFO nova.compute.manager [None 
req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Terminating instance [ 1813.818749] env[62684]: DEBUG nova.compute.manager [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1813.819025] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1813.820492] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c7806c-c18c-4160-b5f9-e35328120da5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.829819] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1813.833427] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e7664ddf-ad11-4c55-b71e-afd52b951f9a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.842751] env[62684]: DEBUG oslo_vmware.api [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Waiting for the task: (returnval){ [ 1813.842751] env[62684]: value = "task-2052557" [ 1813.842751] env[62684]: _type = "Task" [ 1813.842751] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.852133] env[62684]: DEBUG oslo_vmware.api [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Task: {'id': task-2052557, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.857361] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.126451] env[62684]: DEBUG nova.network.neutron [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Successfully created port: 75bc051d-717c-4b1e-9b3c-e79874e6d941 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1814.219948] env[62684]: DEBUG oslo_vmware.api [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Task: {'id': task-2052556, 'name': Rename_Task, 'duration_secs': 0.184521} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.219948] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1814.219948] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f3b3876b-163d-4e38-ba1e-5cd4942958e2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.230626] env[62684]: DEBUG oslo_vmware.api [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Waiting for the task: (returnval){ [ 1814.230626] env[62684]: value = "task-2052558" [ 1814.230626] env[62684]: _type = "Task" [ 1814.230626] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.236297] env[62684]: DEBUG nova.compute.manager [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1814.243250] env[62684]: DEBUG oslo_vmware.api [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Task: {'id': task-2052558, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.355623] env[62684]: DEBUG oslo_vmware.api [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Task: {'id': task-2052557, 'name': PowerOffVM_Task, 'duration_secs': 0.171161} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.355906] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1814.356096] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1814.356397] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-371aacfd-994c-46a9-8843-e8310736b39d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.378924] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27092e1d-3b5f-485e-8fd0-419704792ba8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.390856] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b038667-7e67-4838-8787-cb47ad3c810c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.432780] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65002e4a-9a63-4724-891a-d8a50ddea7ce {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.440631] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fd7eaee-7567-4214-bd97-d5aa8030576c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.456818] env[62684]: DEBUG nova.compute.provider_tree [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1814.600681] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1814.600930] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1814.601137] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Deleting the datastore file [datastore2] 
8c046991-b294-4f33-9fce-a241984d66d7 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1814.601417] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e87a97b-3504-4197-bafa-0339f19afe43 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.610419] env[62684]: DEBUG oslo_vmware.api [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Waiting for the task: (returnval){ [ 1814.610419] env[62684]: value = "task-2052560" [ 1814.610419] env[62684]: _type = "Task" [ 1814.610419] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.619473] env[62684]: DEBUG oslo_vmware.api [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Task: {'id': task-2052560, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.737781] env[62684]: DEBUG nova.compute.manager [req-3b996c86-16ee-4dfd-9de9-3f6865e632bb req-f55cec54-2cc5-4967-8213-2c2ebf133e87 service nova] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Received event network-vif-plugged-1ce065ea-4c6e-4c34-8b7f-27a3b14d0924 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1814.737991] env[62684]: DEBUG oslo_concurrency.lockutils [req-3b996c86-16ee-4dfd-9de9-3f6865e632bb req-f55cec54-2cc5-4967-8213-2c2ebf133e87 service nova] Acquiring lock "3a172e9f-9f79-489e-9571-80bd74ad8609-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.738234] env[62684]: DEBUG oslo_concurrency.lockutils [req-3b996c86-16ee-4dfd-9de9-3f6865e632bb req-f55cec54-2cc5-4967-8213-2c2ebf133e87 service nova] Lock "3a172e9f-9f79-489e-9571-80bd74ad8609-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1814.738473] env[62684]: DEBUG oslo_concurrency.lockutils [req-3b996c86-16ee-4dfd-9de9-3f6865e632bb req-f55cec54-2cc5-4967-8213-2c2ebf133e87 service nova] Lock "3a172e9f-9f79-489e-9571-80bd74ad8609-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1814.738561] env[62684]: DEBUG nova.compute.manager [req-3b996c86-16ee-4dfd-9de9-3f6865e632bb req-f55cec54-2cc5-4967-8213-2c2ebf133e87 service nova] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] No waiting events found dispatching network-vif-plugged-1ce065ea-4c6e-4c34-8b7f-27a3b14d0924 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1814.738782] env[62684]: WARNING nova.compute.manager [req-3b996c86-16ee-4dfd-9de9-3f6865e632bb req-f55cec54-2cc5-4967-8213-2c2ebf133e87 service nova] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Received unexpected event network-vif-plugged-1ce065ea-4c6e-4c34-8b7f-27a3b14d0924 for instance with vm_state building and task_state spawning. 
[ 1814.752018] env[62684]: DEBUG oslo_vmware.api [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Task: {'id': task-2052558, 'name': PowerOnVM_Task, 'duration_secs': 0.467662} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.752018] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1814.752018] env[62684]: INFO nova.compute.manager [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Took 9.35 seconds to spawn the instance on the hypervisor. [ 1814.752018] env[62684]: DEBUG nova.compute.manager [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1814.752018] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91ee54db-a9d7-42c9-a760-164d0a3fadbb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.778253] env[62684]: DEBUG nova.network.neutron [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Successfully updated port: 1ce065ea-4c6e-4c34-8b7f-27a3b14d0924 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1814.960993] env[62684]: DEBUG nova.scheduler.client.report [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1815.123073] env[62684]: DEBUG oslo_vmware.api [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Task: {'id': task-2052560, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155415} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.123555] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1815.123882] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1815.124234] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1815.124546] env[62684]: INFO nova.compute.manager [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Took 1.31 seconds to destroy the instance on the hypervisor. [ 1815.124933] env[62684]: DEBUG oslo.service.loopingcall [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1815.125404] env[62684]: DEBUG nova.compute.manager [-] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1815.125619] env[62684]: DEBUG nova.network.neutron [-] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1815.252140] env[62684]: DEBUG nova.compute.manager [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1815.274790] env[62684]: INFO nova.compute.manager [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Took 41.69 seconds to build instance. 
[ 1815.280255] env[62684]: DEBUG nova.virt.hardware [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1815.280255] env[62684]: DEBUG nova.virt.hardware [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1815.280255] env[62684]: DEBUG nova.virt.hardware [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1815.280380] env[62684]: DEBUG nova.virt.hardware [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1815.280380] env[62684]: DEBUG nova.virt.hardware [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1815.280380] env[62684]: DEBUG nova.virt.hardware [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1815.280380] env[62684]: DEBUG nova.virt.hardware [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1815.280380] env[62684]: DEBUG nova.virt.hardware [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1815.280509] env[62684]: DEBUG nova.virt.hardware [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 
tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1815.280509] env[62684]: DEBUG nova.virt.hardware [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1815.281599] env[62684]: DEBUG nova.virt.hardware [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1815.282747] env[62684]: DEBUG oslo_concurrency.lockutils [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Acquiring lock "refresh_cache-3a172e9f-9f79-489e-9571-80bd74ad8609" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1815.283117] env[62684]: DEBUG oslo_concurrency.lockutils [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Acquired lock "refresh_cache-3a172e9f-9f79-489e-9571-80bd74ad8609" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1815.283401] env[62684]: DEBUG nova.network.neutron [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1815.286180] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b35562eb-2111-4983-93dc-b9f3757c3a08 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.296596] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cba9ec5d-aaa9-4218-b6ee-8755f7c42c8e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.471526] env[62684]: DEBUG oslo_concurrency.lockutils [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.252s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1815.478515] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3421b093-a4a7-42db-b6f8-721ef3bcf465 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 29.555s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1815.507133] env[62684]: INFO 
nova.scheduler.client.report [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Deleted allocations for instance a4767855-0c1d-48c8-98cc-6532ff140b5c [ 1815.535180] env[62684]: DEBUG oslo_vmware.rw_handles [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b2565-8f34-7a99-817d-e03fbd09a6c9/disk-0.vmdk. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1815.536815] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-183c282e-27b4-4c72-af28-03b71c88e65c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.545181] env[62684]: DEBUG oslo_vmware.rw_handles [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b2565-8f34-7a99-817d-e03fbd09a6c9/disk-0.vmdk is in state: ready. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1815.545301] env[62684]: ERROR oslo_vmware.rw_handles [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b2565-8f34-7a99-817d-e03fbd09a6c9/disk-0.vmdk due to incomplete transfer. [ 1815.545975] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-6b31964b-d6f5-4d61-83a9-55909a884264 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.557272] env[62684]: DEBUG oslo_vmware.rw_handles [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b2565-8f34-7a99-817d-e03fbd09a6c9/disk-0.vmdk. 
{{(pid=62684) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1815.559836] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Uploaded image 4c9423b8-2ad2-4bc7-8e51-1037ab451492 to the Glance image server {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1815.562030] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Destroying the VM {{(pid=62684) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1815.562310] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-55d6e199-d960-47f8-b6c1-4b5e85a1d447 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.568656] env[62684]: DEBUG oslo_vmware.api [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1815.568656] env[62684]: value = "task-2052561" [ 1815.568656] env[62684]: _type = "Task" [ 1815.568656] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.576665] env[62684]: DEBUG oslo_vmware.api [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052561, 'name': Destroy_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.777415] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7e2dbf52-83a2-4bf7-987e-315bfda1d967 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Lock "dfe40a8c-61d6-4c60-afd3-0defb61c4308" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.295s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1815.823298] env[62684]: DEBUG nova.network.neutron [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1815.938494] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5f1da79e-e78e-40aa-93ca-3de7a1264544 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Acquiring lock "dfe40a8c-61d6-4c60-afd3-0defb61c4308" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1815.938866] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5f1da79e-e78e-40aa-93ca-3de7a1264544 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Lock "dfe40a8c-61d6-4c60-afd3-0defb61c4308" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1815.939057] env[62684]: INFO nova.compute.manager [None req-5f1da79e-e78e-40aa-93ca-3de7a1264544 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Rebooting instance [ 1815.941297] env[62684]: DEBUG nova.network.neutron [-] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1816.022727] env[62684]: DEBUG oslo_concurrency.lockutils [None req-41d9f57e-f22c-49c2-9ae2-723f18b5410f tempest-ImagesNegativeTestJSON-1463313401 tempest-ImagesNegativeTestJSON-1463313401-project-member] Lock "a4767855-0c1d-48c8-98cc-6532ff140b5c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.419s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1816.036855] env[62684]: DEBUG nova.network.neutron [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Updating instance_info_cache with network_info: [{"id": "1ce065ea-4c6e-4c34-8b7f-27a3b14d0924", "address": "fa:16:3e:10:ec:c4", "network": {"id": "c4f4fd23-e4d0-4e7a-861b-71eeb3008ae5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1003879390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e169ffb4120143dca6d67108986e62f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ce065ea-4c", "ovs_interfaceid": "1ce065ea-4c6e-4c34-8b7f-27a3b14d0924", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1816.059440] env[62684]: DEBUG nova.compute.manager [req-4b26e068-b33c-45f3-865e-6455b1c9b8f3 req-3350dd0d-0a95-4f28-b01f-d567e7d80a27 service nova] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Received event network-vif-plugged-75bc051d-717c-4b1e-9b3c-e79874e6d941 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1816.059751] env[62684]: DEBUG oslo_concurrency.lockutils [req-4b26e068-b33c-45f3-865e-6455b1c9b8f3 req-3350dd0d-0a95-4f28-b01f-d567e7d80a27 service nova] Acquiring lock "6d4061e4-a074-445d-95c5-239014ee87f3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1816.060080] env[62684]: DEBUG oslo_concurrency.lockutils [req-4b26e068-b33c-45f3-865e-6455b1c9b8f3 req-3350dd0d-0a95-4f28-b01f-d567e7d80a27 service nova] Lock "6d4061e4-a074-445d-95c5-239014ee87f3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1816.060300] env[62684]: DEBUG oslo_concurrency.lockutils [req-4b26e068-b33c-45f3-865e-6455b1c9b8f3 req-3350dd0d-0a95-4f28-b01f-d567e7d80a27 service nova] Lock "6d4061e4-a074-445d-95c5-239014ee87f3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1816.060705] env[62684]: DEBUG nova.compute.manager [req-4b26e068-b33c-45f3-865e-6455b1c9b8f3 req-3350dd0d-0a95-4f28-b01f-d567e7d80a27 service nova] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] No waiting events found dispatching network-vif-plugged-75bc051d-717c-4b1e-9b3c-e79874e6d941 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1816.061346] env[62684]: WARNING nova.compute.manager [req-4b26e068-b33c-45f3-865e-6455b1c9b8f3 req-3350dd0d-0a95-4f28-b01f-d567e7d80a27 service nova] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Received unexpected event network-vif-plugged-75bc051d-717c-4b1e-9b3c-e79874e6d941 for instance with vm_state building and task_state spawning. [ 1816.084988] env[62684]: DEBUG oslo_vmware.api [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052561, 'name': Destroy_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.194461] env[62684]: DEBUG nova.network.neutron [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Successfully updated port: 75bc051d-717c-4b1e-9b3c-e79874e6d941 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1816.280138] env[62684]: DEBUG nova.compute.manager [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1816.448061] env[62684]: INFO nova.compute.manager [-] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Took 1.32 seconds to deallocate network for instance. [ 1816.470013] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5f1da79e-e78e-40aa-93ca-3de7a1264544 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Acquiring lock "refresh_cache-dfe40a8c-61d6-4c60-afd3-0defb61c4308" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1816.470225] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5f1da79e-e78e-40aa-93ca-3de7a1264544 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Acquired lock "refresh_cache-dfe40a8c-61d6-4c60-afd3-0defb61c4308" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1816.470404] env[62684]: DEBUG nova.network.neutron [None req-5f1da79e-e78e-40aa-93ca-3de7a1264544 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1816.540072] env[62684]: DEBUG oslo_concurrency.lockutils [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Releasing lock "refresh_cache-3a172e9f-9f79-489e-9571-80bd74ad8609" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1816.541386] env[62684]: DEBUG nova.compute.manager [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Instance network_info: |[{"id": "1ce065ea-4c6e-4c34-8b7f-27a3b14d0924", "address": "fa:16:3e:10:ec:c4", "network": {"id": "c4f4fd23-e4d0-4e7a-861b-71eeb3008ae5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1003879390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e169ffb4120143dca6d67108986e62f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ce065ea-4c", "ovs_interfaceid": "1ce065ea-4c6e-4c34-8b7f-27a3b14d0924", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1816.541890] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 
tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:10:ec:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '816c6e38-e200-4544-8c5b-9fc3e16c5761', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1ce065ea-4c6e-4c34-8b7f-27a3b14d0924', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1816.551962] env[62684]: DEBUG oslo.service.loopingcall [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1816.555693] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1816.556675] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-85c81635-458d-4ddb-ab86-9c5d8aba99b9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.580893] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1816.580893] env[62684]: value = "task-2052562" [ 1816.580893] env[62684]: _type = "Task" [ 1816.580893] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.586560] env[62684]: DEBUG oslo_vmware.api [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052561, 'name': Destroy_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.593656] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd66d005-28da-4da8-84ba-80c57909d323 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.600335] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052562, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.604801] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad45f165-a0e6-485d-a50e-ac63fe8dab15 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.637419] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7835989f-6bf6-4b4d-abae-3da10d65de9b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.645161] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-330d4c8d-c58d-4d8c-8540-5bea451a59ec {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.658321] env[62684]: DEBUG nova.compute.provider_tree [None req-3421b093-a4a7-42db-b6f8-721ef3bcf465 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1816.702855] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "refresh_cache-6d4061e4-a074-445d-95c5-239014ee87f3" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1816.702993] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquired lock "refresh_cache-6d4061e4-a074-445d-95c5-239014ee87f3" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1816.703185] env[62684]: DEBUG nova.network.neutron [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1816.807242] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1816.928330] env[62684]: DEBUG nova.compute.manager [req-00b15cb4-ada0-4352-92f7-3d8c590ca2a6 req-8d2347c9-61dd-4781-81f1-fe4c9d1de8d9 service nova] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Received event network-changed-1ce065ea-4c6e-4c34-8b7f-27a3b14d0924 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1816.928535] env[62684]: DEBUG nova.compute.manager [req-00b15cb4-ada0-4352-92f7-3d8c590ca2a6 req-8d2347c9-61dd-4781-81f1-fe4c9d1de8d9 service nova] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Refreshing instance network info cache due to event network-changed-1ce065ea-4c6e-4c34-8b7f-27a3b14d0924. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1816.928754] env[62684]: DEBUG oslo_concurrency.lockutils [req-00b15cb4-ada0-4352-92f7-3d8c590ca2a6 req-8d2347c9-61dd-4781-81f1-fe4c9d1de8d9 service nova] Acquiring lock "refresh_cache-3a172e9f-9f79-489e-9571-80bd74ad8609" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1816.928900] env[62684]: DEBUG oslo_concurrency.lockutils [req-00b15cb4-ada0-4352-92f7-3d8c590ca2a6 req-8d2347c9-61dd-4781-81f1-fe4c9d1de8d9 service nova] Acquired lock "refresh_cache-3a172e9f-9f79-489e-9571-80bd74ad8609" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1816.929166] env[62684]: DEBUG nova.network.neutron [req-00b15cb4-ada0-4352-92f7-3d8c590ca2a6 req-8d2347c9-61dd-4781-81f1-fe4c9d1de8d9 service nova] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Refreshing network info cache for port 1ce065ea-4c6e-4c34-8b7f-27a3b14d0924 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1816.957458] env[62684]: DEBUG oslo_concurrency.lockutils [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1817.097913] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052562, 'name': CreateVM_Task, 'duration_secs': 0.306059} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.100714] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1817.101132] env[62684]: DEBUG oslo_vmware.api [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052561, 'name': Destroy_Task} progress is 100%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.101825] env[62684]: DEBUG oslo_concurrency.lockutils [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1817.102035] env[62684]: DEBUG oslo_concurrency.lockutils [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1817.102381] env[62684]: DEBUG oslo_concurrency.lockutils [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1817.102672] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1317401-c5e7-4337-80d6-afd453192aac {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.107327] env[62684]: DEBUG oslo_vmware.api [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Waiting for the task: (returnval){ [ 1817.107327] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a51593-ffa1-5625-6520-469d28568f43" [ 1817.107327] env[62684]: _type = "Task" [ 1817.107327] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.117879] env[62684]: DEBUG oslo_vmware.api [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a51593-ffa1-5625-6520-469d28568f43, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.162838] env[62684]: DEBUG nova.scheduler.client.report [None req-3421b093-a4a7-42db-b6f8-721ef3bcf465 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1817.259708] env[62684]: DEBUG nova.network.neutron [None req-5f1da79e-e78e-40aa-93ca-3de7a1264544 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Updating instance_info_cache with network_info: [{"id": "140f3118-d3fd-4be3-a661-89c3cac2cb26", "address": "fa:16:3e:eb:04:23", "network": {"id": "afa57c36-637b-4edc-96e8-5e5ff2185258", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-270820560-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5513297908414207afa484e075bd26e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "26a1e556-1ede-445a-bf87-a35daa5d9070", "external-id": "nsx-vlan-transportzone-396", "segmentation_id": 396, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap140f3118-d3", "ovs_interfaceid": "140f3118-d3fd-4be3-a661-89c3cac2cb26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1817.305772] env[62684]: DEBUG nova.network.neutron [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1817.590760] env[62684]: DEBUG oslo_vmware.api [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052561, 'name': Destroy_Task, 'duration_secs': 2.020023} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.591056] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Destroyed the VM [ 1817.591303] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Deleting Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1817.591567] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-cb93cfa2-c692-43ca-abf9-9a5fa9063ba0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.598222] env[62684]: DEBUG oslo_vmware.api [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1817.598222] env[62684]: value = "task-2052563" [ 1817.598222] env[62684]: _type = "Task" [ 1817.598222] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.607427] env[62684]: DEBUG oslo_vmware.api [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052563, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.617085] env[62684]: DEBUG oslo_vmware.api [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a51593-ffa1-5625-6520-469d28568f43, 'name': SearchDatastore_Task, 'duration_secs': 0.009995} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.617491] env[62684]: DEBUG oslo_concurrency.lockutils [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1817.617815] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1817.618090] env[62684]: DEBUG oslo_concurrency.lockutils [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1817.618238] env[62684]: DEBUG oslo_concurrency.lockutils [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1817.618420] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1817.618693] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0ef4f813-1b55-4bf3-9ded-368be4a4dc35 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.627121] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1817.627306] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1817.628061] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59d74f17-9914-4037-8957-2bf2a98059bf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.636877] env[62684]: DEBUG oslo_vmware.api [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Waiting for the task: (returnval){ [ 1817.636877] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52053b47-bd91-4fb4-8a4c-8b5f98a2e136" [ 1817.636877] env[62684]: _type = "Task" [ 1817.636877] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.644483] env[62684]: DEBUG oslo_vmware.api [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52053b47-bd91-4fb4-8a4c-8b5f98a2e136, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.670381] env[62684]: DEBUG nova.network.neutron [req-00b15cb4-ada0-4352-92f7-3d8c590ca2a6 req-8d2347c9-61dd-4781-81f1-fe4c9d1de8d9 service nova] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Updated VIF entry in instance network info cache for port 1ce065ea-4c6e-4c34-8b7f-27a3b14d0924. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1817.670791] env[62684]: DEBUG nova.network.neutron [req-00b15cb4-ada0-4352-92f7-3d8c590ca2a6 req-8d2347c9-61dd-4781-81f1-fe4c9d1de8d9 service nova] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Updating instance_info_cache with network_info: [{"id": "1ce065ea-4c6e-4c34-8b7f-27a3b14d0924", "address": "fa:16:3e:10:ec:c4", "network": {"id": "c4f4fd23-e4d0-4e7a-861b-71eeb3008ae5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1003879390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e169ffb4120143dca6d67108986e62f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ce065ea-4c", "ovs_interfaceid": "1ce065ea-4c6e-4c34-8b7f-27a3b14d0924", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1817.762553] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5f1da79e-e78e-40aa-93ca-3de7a1264544 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Releasing lock 
"refresh_cache-dfe40a8c-61d6-4c60-afd3-0defb61c4308" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1817.764731] env[62684]: DEBUG nova.compute.manager [None req-5f1da79e-e78e-40aa-93ca-3de7a1264544 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1817.765619] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-635ba9c1-69ad-4de0-8b8d-25b626f573c9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.993409] env[62684]: DEBUG nova.network.neutron [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Updating instance_info_cache with network_info: [{"id": "75bc051d-717c-4b1e-9b3c-e79874e6d941", "address": "fa:16:3e:61:3e:7d", "network": {"id": "95ad5a29-9716-4b0c-937b-33a498c74ef7", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-342262548-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61c83953d09c4d1c97eee5a8679c30d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75bc051d-71", "ovs_interfaceid": "75bc051d-717c-4b1e-9b3c-e79874e6d941", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1818.108538] env[62684]: DEBUG oslo_vmware.api [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052563, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.150304] env[62684]: DEBUG oslo_vmware.api [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52053b47-bd91-4fb4-8a4c-8b5f98a2e136, 'name': SearchDatastore_Task, 'duration_secs': 0.009075} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.151358] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0beead4c-0322-41ba-8748-5502c6543090 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.160365] env[62684]: DEBUG oslo_vmware.api [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Waiting for the task: (returnval){ [ 1818.160365] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d8e227-fa7f-3ca5-1b34-f8ab03b7de83" [ 1818.160365] env[62684]: _type = "Task" [ 1818.160365] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.169945] env[62684]: DEBUG oslo_vmware.api [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d8e227-fa7f-3ca5-1b34-f8ab03b7de83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.174890] env[62684]: DEBUG oslo_concurrency.lockutils [req-00b15cb4-ada0-4352-92f7-3d8c590ca2a6 req-8d2347c9-61dd-4781-81f1-fe4c9d1de8d9 service nova] Releasing lock "refresh_cache-3a172e9f-9f79-489e-9571-80bd74ad8609" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1818.175128] env[62684]: DEBUG nova.compute.manager [req-00b15cb4-ada0-4352-92f7-3d8c590ca2a6 req-8d2347c9-61dd-4781-81f1-fe4c9d1de8d9 service nova] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Received event network-vif-deleted-b474f9ed-1ee0-4186-9dd7-336689da4726 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1818.178209] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3421b093-a4a7-42db-b6f8-721ef3bcf465 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.704s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1818.181433] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.035s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1818.183561] env[62684]: INFO nova.compute.claims [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1818.463998] env[62684]: DEBUG nova.compute.manager [req-c2e453c1-48bb-44fd-8cd5-8610a94df9eb req-810d8035-1653-4491-953a-0f49d2b3e775 service nova] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Received event network-changed-75bc051d-717c-4b1e-9b3c-e79874e6d941 {{(pid=62684) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1818.464603] env[62684]: DEBUG nova.compute.manager [req-c2e453c1-48bb-44fd-8cd5-8610a94df9eb req-810d8035-1653-4491-953a-0f49d2b3e775 service nova] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Refreshing instance network info cache due to event network-changed-75bc051d-717c-4b1e-9b3c-e79874e6d941. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1818.465785] env[62684]: DEBUG oslo_concurrency.lockutils [req-c2e453c1-48bb-44fd-8cd5-8610a94df9eb req-810d8035-1653-4491-953a-0f49d2b3e775 service nova] Acquiring lock "refresh_cache-6d4061e4-a074-445d-95c5-239014ee87f3" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1818.498079] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Releasing lock "refresh_cache-6d4061e4-a074-445d-95c5-239014ee87f3" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1818.498079] env[62684]: DEBUG nova.compute.manager [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Instance network_info: |[{"id": "75bc051d-717c-4b1e-9b3c-e79874e6d941", "address": "fa:16:3e:61:3e:7d", "network": {"id": "95ad5a29-9716-4b0c-937b-33a498c74ef7", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-342262548-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61c83953d09c4d1c97eee5a8679c30d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75bc051d-71", "ovs_interfaceid": "75bc051d-717c-4b1e-9b3c-e79874e6d941", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1818.498367] env[62684]: DEBUG oslo_concurrency.lockutils [req-c2e453c1-48bb-44fd-8cd5-8610a94df9eb req-810d8035-1653-4491-953a-0f49d2b3e775 service nova] Acquired lock "refresh_cache-6d4061e4-a074-445d-95c5-239014ee87f3" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1818.498367] env[62684]: DEBUG nova.network.neutron [req-c2e453c1-48bb-44fd-8cd5-8610a94df9eb req-810d8035-1653-4491-953a-0f49d2b3e775 service nova] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Refreshing network info cache for port 75bc051d-717c-4b1e-9b3c-e79874e6d941 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1818.498367] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 
tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:3e:7d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c68b7663-4f0e-47f0-ac7f-40c6d952f7bb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '75bc051d-717c-4b1e-9b3c-e79874e6d941', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1818.512014] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Creating folder: Project (61c83953d09c4d1c97eee5a8679c30d4). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1818.514052] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12a48b0a-2619-47b4-ab87-b535703e7395 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.525629] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Created folder: Project (61c83953d09c4d1c97eee5a8679c30d4) in parent group-v421118. [ 1818.525926] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Creating folder: Instances. Parent ref: group-v421209. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1818.526075] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6fad2a53-3121-41c2-ac55-21658fc3d833 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.534775] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Created folder: Instances in parent group-v421209. [ 1818.535032] env[62684]: DEBUG oslo.service.loopingcall [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1818.535295] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1818.535484] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8e251399-e667-41ef-b3ab-d40c742279a5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.555774] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1818.555774] env[62684]: value = "task-2052566" [ 1818.555774] env[62684]: _type = "Task" [ 1818.555774] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.566549] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052566, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.608758] env[62684]: DEBUG oslo_vmware.api [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052563, 'name': RemoveSnapshot_Task, 'duration_secs': 0.907252} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.609059] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Deleted Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1818.609332] env[62684]: INFO nova.compute.manager [None req-5bb26e61-6cbe-4bf3-8818-4b081b4290f3 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Took 18.52 seconds to snapshot the instance on the hypervisor. [ 1818.670860] env[62684]: DEBUG oslo_vmware.api [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d8e227-fa7f-3ca5-1b34-f8ab03b7de83, 'name': SearchDatastore_Task, 'duration_secs': 0.014746} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.671608] env[62684]: DEBUG oslo_concurrency.lockutils [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1818.672327] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 3a172e9f-9f79-489e-9571-80bd74ad8609/3a172e9f-9f79-489e-9571-80bd74ad8609.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1818.672756] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9fec9202-f1e0-4735-9152-c71c2ec61513 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.683130] env[62684]: DEBUG oslo_vmware.api [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Waiting for the task: (returnval){ [ 1818.683130] env[62684]: value = "task-2052567" [ 1818.683130] env[62684]: _type = "Task" [ 1818.683130] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.696016] env[62684]: DEBUG oslo_vmware.api [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052567, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.771143] env[62684]: INFO nova.scheduler.client.report [None req-3421b093-a4a7-42db-b6f8-721ef3bcf465 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Deleted allocation for migration 96f2bb25-db44-4b5b-b5d0-be024988a0cc [ 1818.784919] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3615b355-4e71-41c2-bb89-370f0e714622 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.792446] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5f1da79e-e78e-40aa-93ca-3de7a1264544 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Doing hard reboot of VM {{(pid=62684) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1818.792717] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-21b692c8-fb7c-4727-a33c-e1d3de063dcc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.799307] env[62684]: DEBUG oslo_vmware.api [None req-5f1da79e-e78e-40aa-93ca-3de7a1264544 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Waiting for the task: (returnval){ [ 1818.799307] env[62684]: value = "task-2052568" [ 1818.799307] env[62684]: _type = "Task" [ 1818.799307] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.807585] env[62684]: DEBUG oslo_vmware.api [None req-5f1da79e-e78e-40aa-93ca-3de7a1264544 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Task: {'id': task-2052568, 'name': ResetVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.071236] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052566, 'name': CreateVM_Task, 'duration_secs': 0.470721} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.071236] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1819.071236] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1819.071236] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1819.071236] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1819.071236] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55b789f5-b13e-4516-ba71-521c603d162d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.077882] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1819.077882] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524fd891-6460-defb-c5e7-e41787eb6103" [ 1819.077882] env[62684]: _type = "Task" [ 1819.077882] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.092616] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524fd891-6460-defb-c5e7-e41787eb6103, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.193836] env[62684]: DEBUG oslo_vmware.api [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052567, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.280077] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3421b093-a4a7-42db-b6f8-721ef3bcf465 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "0676806b-c1f0-4c1a-a12d-add2edf1588f" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 37.309s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1819.316375] env[62684]: DEBUG oslo_vmware.api [None req-5f1da79e-e78e-40aa-93ca-3de7a1264544 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Task: {'id': task-2052568, 'name': ResetVM_Task, 'duration_secs': 0.113745} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.320107] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5f1da79e-e78e-40aa-93ca-3de7a1264544 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Did hard reboot of VM {{(pid=62684) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1819.320337] env[62684]: DEBUG nova.compute.manager [None req-5f1da79e-e78e-40aa-93ca-3de7a1264544 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1819.321567] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11482225-dcec-4b2a-85b6-e36694927348 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.453301] env[62684]: DEBUG nova.network.neutron [req-c2e453c1-48bb-44fd-8cd5-8610a94df9eb req-810d8035-1653-4491-953a-0f49d2b3e775 service nova] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Updated VIF entry in instance network info cache for port 75bc051d-717c-4b1e-9b3c-e79874e6d941. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1819.453731] env[62684]: DEBUG nova.network.neutron [req-c2e453c1-48bb-44fd-8cd5-8610a94df9eb req-810d8035-1653-4491-953a-0f49d2b3e775 service nova] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Updating instance_info_cache with network_info: [{"id": "75bc051d-717c-4b1e-9b3c-e79874e6d941", "address": "fa:16:3e:61:3e:7d", "network": {"id": "95ad5a29-9716-4b0c-937b-33a498c74ef7", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-342262548-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61c83953d09c4d1c97eee5a8679c30d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75bc051d-71", "ovs_interfaceid": "75bc051d-717c-4b1e-9b3c-e79874e6d941", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1819.591736] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524fd891-6460-defb-c5e7-e41787eb6103, 'name': SearchDatastore_Task, 'duration_secs': 0.054214} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.594522] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1819.594781] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1819.595035] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1819.595228] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1819.595427] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1819.596702] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b2ce1433-f41c-47d2-8e6b-9209541bc2b1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.604982] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1819.605195] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1819.605932] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74bb5a7d-cab1-427d-8859-681a421cd29e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.611565] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1819.611565] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e5eeb5-92ba-b856-4d79-0d59249c0d9e" [ 1819.611565] env[62684]: _type = "Task" [ 1819.611565] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.623164] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e5eeb5-92ba-b856-4d79-0d59249c0d9e, 'name': SearchDatastore_Task, 'duration_secs': 0.008936} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.623988] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a6fcc0e-942b-4ada-a2f8-56bfdae5961a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.632646] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1819.632646] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526a5a42-8456-db11-1b7f-4680cec4d399" [ 1819.632646] env[62684]: _type = "Task" [ 1819.632646] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.648078] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526a5a42-8456-db11-1b7f-4680cec4d399, 'name': SearchDatastore_Task, 'duration_secs': 0.008873} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.650530] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1819.650952] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 6d4061e4-a074-445d-95c5-239014ee87f3/6d4061e4-a074-445d-95c5-239014ee87f3.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1819.651443] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6f88cc48-47d6-4606-8548-6cab557e7dcd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.659268] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1819.659268] env[62684]: value = "task-2052569" [ 1819.659268] env[62684]: _type = "Task" [ 1819.659268] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.674346] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052569, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.698911] env[62684]: DEBUG oslo_vmware.api [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052567, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514055} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.699181] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 3a172e9f-9f79-489e-9571-80bd74ad8609/3a172e9f-9f79-489e-9571-80bd74ad8609.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1819.699391] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1819.699643] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-96bebbd3-40c2-44c9-9a78-c2b2a74e7602 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.708872] env[62684]: DEBUG oslo_vmware.api [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Waiting for the task: (returnval){ [ 1819.708872] env[62684]: value = "task-2052570" [ 1819.708872] env[62684]: _type = "Task" [ 1819.708872] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.717499] env[62684]: DEBUG oslo_vmware.api [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052570, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.814216] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3f27f3c-3b94-4970-824f-3a81727899b3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.827493] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-993cde4e-ec1e-44a5-86ce-57d31140614d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.869353] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb2c1ce-1bdc-4145-b577-9572342ba27e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.873811] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5f1da79e-e78e-40aa-93ca-3de7a1264544 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Lock "dfe40a8c-61d6-4c60-afd3-0defb61c4308" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 3.935s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1819.882104] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e559f5-b230-4bb9-b22a-3dfc7636bb75 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.900322] env[62684]: DEBUG nova.compute.provider_tree [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1819.960794] env[62684]: DEBUG oslo_concurrency.lockutils [req-c2e453c1-48bb-44fd-8cd5-8610a94df9eb req-810d8035-1653-4491-953a-0f49d2b3e775 service nova] Releasing lock "refresh_cache-6d4061e4-a074-445d-95c5-239014ee87f3" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1820.169499] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052569, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.504056} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.169766] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 6d4061e4-a074-445d-95c5-239014ee87f3/6d4061e4-a074-445d-95c5-239014ee87f3.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1820.169976] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1820.170249] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9e0df489-4b20-4bd7-aa7b-83fbbde575a3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.176022] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1820.176022] env[62684]: value = "task-2052571" [ 1820.176022] env[62684]: _type = "Task" [ 1820.176022] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.183580] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052571, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.224119] env[62684]: DEBUG oslo_vmware.api [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052570, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068982} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.224119] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1820.224119] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d83503d3-4d25-4f7e-b687-438eafaea96f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.246880] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] 3a172e9f-9f79-489e-9571-80bd74ad8609/3a172e9f-9f79-489e-9571-80bd74ad8609.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1820.249101] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb84a76c-c780-4ac4-9963-9083303786c3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.276337] env[62684]: DEBUG oslo_vmware.api [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Waiting for the task: (returnval){ [ 1820.276337] env[62684]: value = "task-2052572" [ 1820.276337] env[62684]: _type = "Task" [ 1820.276337] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.284500] env[62684]: DEBUG oslo_vmware.api [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052572, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.411195] env[62684]: DEBUG nova.scheduler.client.report [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1820.686211] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052571, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064021} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.686545] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1820.687365] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e476d3d7-7bf5-4bd5-8bb5-a5be8252665a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.718689] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] 6d4061e4-a074-445d-95c5-239014ee87f3/6d4061e4-a074-445d-95c5-239014ee87f3.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1820.719065] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b71e56c-dd76-49b1-ac5b-59032353f7c6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.746088] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1820.746088] env[62684]: value = "task-2052573" [ 1820.746088] env[62684]: _type = "Task" [ 1820.746088] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.755334] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052573, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.775148] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Acquiring lock "dfe40a8c-61d6-4c60-afd3-0defb61c4308" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1820.775494] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Lock "dfe40a8c-61d6-4c60-afd3-0defb61c4308" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.775677] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Acquiring lock "dfe40a8c-61d6-4c60-afd3-0defb61c4308-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1820.775859] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Lock "dfe40a8c-61d6-4c60-afd3-0defb61c4308-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.776034] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Lock "dfe40a8c-61d6-4c60-afd3-0defb61c4308-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.782173] env[62684]: INFO nova.compute.manager [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Terminating instance [ 1820.784329] env[62684]: DEBUG nova.compute.manager [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1820.784554] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1820.785873] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1d8fb3a-4bce-4c96-95cc-d417d06e3b03 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.791989] env[62684]: DEBUG oslo_vmware.api [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052572, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.799038] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1820.799038] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0a474b4f-bd9e-43df-a097-79d0cc9e83fa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.804220] env[62684]: DEBUG oslo_vmware.api [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Waiting for the task: (returnval){ [ 1820.804220] env[62684]: value = "task-2052574" [ 1820.804220] env[62684]: _type = "Task" [ 1820.804220] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.817196] env[62684]: DEBUG oslo_vmware.api [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Task: {'id': task-2052574, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.916209] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.735s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.917866] env[62684]: DEBUG nova.compute.manager [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1820.923308] env[62684]: DEBUG oslo_concurrency.lockutils [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.942s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.924887] env[62684]: INFO nova.compute.claims [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1820.993494] env[62684]: DEBUG nova.compute.manager [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1820.994658] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3efe841f-e243-4322-88c3-e4e0b5e8e604 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.257124] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052573, 'name': ReconfigVM_Task, 'duration_secs': 0.386227} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.257453] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Reconfigured VM instance instance-0000001e to attach disk [datastore1] 6d4061e4-a074-445d-95c5-239014ee87f3/6d4061e4-a074-445d-95c5-239014ee87f3.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1821.258168] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5f76b746-dc87-4b91-a884-fa869baee007 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.266796] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1821.266796] env[62684]: value = "task-2052575" [ 1821.266796] env[62684]: _type = "Task" [ 1821.266796] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.280974] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052575, 'name': Rename_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.291792] env[62684]: DEBUG oslo_vmware.api [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052572, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.327022] env[62684]: DEBUG oslo_vmware.api [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Task: {'id': task-2052574, 'name': PowerOffVM_Task, 'duration_secs': 0.195986} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.327022] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1821.327022] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1821.327022] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bf3882a0-fa7e-4bce-9576-9ea8520779d1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.411437] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1821.411712] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1821.411900] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Deleting the datastore file [datastore2] dfe40a8c-61d6-4c60-afd3-0defb61c4308 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1821.412277] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ce225d12-43b4-451a-992e-07a9582750d4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.423987] env[62684]: DEBUG oslo_vmware.api [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Waiting for the task: (returnval){ [ 1821.423987] env[62684]: value = "task-2052577" [ 
1821.423987] env[62684]: _type = "Task" [ 1821.423987] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.438929] env[62684]: DEBUG nova.compute.utils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1821.438929] env[62684]: DEBUG nova.compute.manager [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1821.438929] env[62684]: DEBUG nova.network.neutron [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1821.451054] env[62684]: DEBUG oslo_vmware.api [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Task: {'id': task-2052577, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.509628] env[62684]: INFO nova.compute.manager [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] instance snapshotting [ 1821.512697] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7954841-e4c9-4179-bb1c-717ba40f328c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.542770] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-429cdf20-ccff-41b3-ad80-e9311d6cd993 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.556370] env[62684]: DEBUG nova.policy [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5892ca114a9d4a6e95c0498c2fc7f2ba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61c83953d09c4d1c97eee5a8679c30d4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1821.784478] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052575, 'name': Rename_Task, 'duration_secs': 0.163378} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.790436] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1821.790762] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d34b85c6-acc2-470b-9867-b4bf6965cdf0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.801709] env[62684]: DEBUG oslo_vmware.api [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052572, 'name': ReconfigVM_Task, 'duration_secs': 1.265811} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.803171] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Reconfigured VM instance instance-0000001d to attach disk [datastore1] 3a172e9f-9f79-489e-9571-80bd74ad8609/3a172e9f-9f79-489e-9571-80bd74ad8609.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1821.803920] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1821.803920] env[62684]: value = "task-2052578" [ 1821.803920] env[62684]: _type = "Task" [ 1821.803920] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.804156] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e4c38842-d9cf-435d-9321-a1a77511484c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.815506] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052578, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.817523] env[62684]: DEBUG oslo_vmware.api [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Waiting for the task: (returnval){ [ 1821.817523] env[62684]: value = "task-2052579" [ 1821.817523] env[62684]: _type = "Task" [ 1821.817523] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.835523] env[62684]: DEBUG oslo_vmware.api [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052579, 'name': Rename_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.935013] env[62684]: DEBUG oslo_vmware.api [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Task: {'id': task-2052577, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173393} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.935331] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1821.935605] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1821.935707] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1821.935902] env[62684]: INFO nova.compute.manager [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1821.936166] env[62684]: DEBUG oslo.service.loopingcall [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1821.936398] env[62684]: DEBUG nova.compute.manager [-] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1821.936498] env[62684]: DEBUG nova.network.neutron [-] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1821.944564] env[62684]: DEBUG nova.compute.manager [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1822.061453] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Creating Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1822.062314] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2f24cc27-5489-486a-9802-bbcc6f90fa58 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.072979] env[62684]: DEBUG oslo_vmware.api [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1822.072979] env[62684]: value = "task-2052580" [ 1822.072979] env[62684]: _type = "Task" [ 1822.072979] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.090428] env[62684]: DEBUG oslo_vmware.api [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052580, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.125365] env[62684]: DEBUG nova.network.neutron [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Successfully created port: e6a11cfd-6369-4dd8-a41b-dd7ef4d49a87 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1822.320439] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052578, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.330087] env[62684]: DEBUG oslo_vmware.api [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052579, 'name': Rename_Task, 'duration_secs': 0.200447} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.330516] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1822.330790] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-946ac998-46bc-4ce9-978b-0813c745fc37 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.338483] env[62684]: DEBUG oslo_vmware.api [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Waiting for the task: (returnval){ [ 1822.338483] env[62684]: value = "task-2052581" [ 1822.338483] env[62684]: _type = "Task" [ 1822.338483] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.348719] env[62684]: DEBUG oslo_vmware.api [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052581, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.531980] env[62684]: DEBUG oslo_concurrency.lockutils [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "b1f70e39-bf37-4fb8-b95b-653b59bec265" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1822.532461] env[62684]: DEBUG oslo_concurrency.lockutils [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "b1f70e39-bf37-4fb8-b95b-653b59bec265" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1822.587021] env[62684]: DEBUG oslo_vmware.api [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052580, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.614013] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c2c6e1-98f4-4f82-803b-887c8baeef55 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.624252] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3481447-80b9-406a-812b-9819728c412c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.659655] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec4fd0e-79be-4eb7-addc-29b070192577 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.668094] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc45f193-3398-4def-9762-ca6656975811 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.686784] env[62684]: DEBUG nova.compute.provider_tree [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1822.791439] env[62684]: DEBUG nova.network.neutron [-] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1822.797143] env[62684]: DEBUG nova.compute.manager [req-98174b2d-b368-4579-9580-49fb17a056a5 req-8fadbd8a-82a6-49d0-ab4f-8a748f80b379 service nova] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Received event network-vif-deleted-140f3118-d3fd-4be3-a661-89c3cac2cb26 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1822.797143] env[62684]: INFO nova.compute.manager [req-98174b2d-b368-4579-9580-49fb17a056a5 req-8fadbd8a-82a6-49d0-ab4f-8a748f80b379 service nova] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Neutron deleted interface 140f3118-d3fd-4be3-a661-89c3cac2cb26; detaching it from the instance and deleting it from the info cache [ 1822.797143] env[62684]: DEBUG nova.network.neutron [req-98174b2d-b368-4579-9580-49fb17a056a5 req-8fadbd8a-82a6-49d0-ab4f-8a748f80b379 service nova] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1822.822980] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052578, 'name': PowerOnVM_Task, 'duration_secs': 0.733223} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.823296] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1822.823502] env[62684]: INFO nova.compute.manager [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Took 7.57 seconds to spawn the instance on the hypervisor. [ 1822.823696] env[62684]: DEBUG nova.compute.manager [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1822.824515] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a856eae-eabf-4c26-ad63-2e273193520e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.850549] env[62684]: DEBUG oslo_vmware.api [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052581, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.958936] env[62684]: DEBUG nova.compute.manager [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1822.988835] env[62684]: DEBUG nova.virt.hardware [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1822.988977] env[62684]: DEBUG nova.virt.hardware [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1822.989240] env[62684]: DEBUG nova.virt.hardware [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1822.989372] env[62684]: DEBUG nova.virt.hardware [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1822.989564] env[62684]: DEBUG nova.virt.hardware [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1822.989688] env[62684]: DEBUG nova.virt.hardware [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1822.992180] env[62684]: DEBUG nova.virt.hardware [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1822.992180] env[62684]: DEBUG nova.virt.hardware [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1822.992180] env[62684]: DEBUG 
nova.virt.hardware [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1822.992180] env[62684]: DEBUG nova.virt.hardware [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1822.992180] env[62684]: DEBUG nova.virt.hardware [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1822.992755] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c08ad580-898f-4370-89dc-a777d5a8dfe2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.004145] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfeead56-caa9-44bf-a388-6714dc0d4280 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.083089] env[62684]: DEBUG oslo_vmware.api [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052580, 'name': CreateSnapshot_Task, 'duration_secs': 0.79778} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.083401] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Created Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1823.084154] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d5025e-7478-4589-8bde-584603700013 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.207175] env[62684]: ERROR nova.scheduler.client.report [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [req-b7ab5b8a-23f0-4707-b2a0-a3b1555b973a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b7ab5b8a-23f0-4707-b2a0-a3b1555b973a"}]} [ 1823.221464] env[62684]: DEBUG nova.scheduler.client.report [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1823.236364] env[62684]: DEBUG nova.scheduler.client.report [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1823.236474] env[62684]: DEBUG nova.compute.provider_tree [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1823.247936] env[62684]: DEBUG nova.scheduler.client.report [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1823.265043] env[62684]: DEBUG nova.scheduler.client.report [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1823.294215] env[62684]: INFO nova.compute.manager [-] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Took 1.36 seconds to deallocate network for instance. 
[ 1823.301528] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-489a559f-68c9-4521-b28d-6c9065beb8c4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.316765] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb56efb2-6933-4be2-b7a1-be8d6507b5ed {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.357163] env[62684]: DEBUG nova.compute.manager [req-98174b2d-b368-4579-9580-49fb17a056a5 req-8fadbd8a-82a6-49d0-ab4f-8a748f80b379 service nova] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Detach interface failed, port_id=140f3118-d3fd-4be3-a661-89c3cac2cb26, reason: Instance dfe40a8c-61d6-4c60-afd3-0defb61c4308 could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1823.362563] env[62684]: INFO nova.compute.manager [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Took 39.95 seconds to build instance. [ 1823.369718] env[62684]: DEBUG oslo_vmware.api [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052581, 'name': PowerOnVM_Task, 'duration_secs': 0.581802} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.372239] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1823.372456] env[62684]: INFO nova.compute.manager [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Took 10.96 seconds to spawn the instance on the hypervisor. 
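Annotation: the recurring "_poll_task ... progress is N%." and "Task: {...} completed successfully" lines around the PowerOnVM_Task, CloneVM_Task and CreateVM_Task entries reflect a simple poll-until-terminal-state loop around long-running vCenter tasks. The sketch below shows only that loop shape; it is library-agnostic and not the oslo.vmware implementation, and fetch_task_info is a hypothetical stand-in for whatever call reads the task state.

    # Minimal, library-agnostic sketch of the poll loop behind the
    # "progress is N%" / "completed successfully" lines above.
    import time
    from dataclasses import dataclass
    from typing import Callable

    @dataclass
    class TaskInfo:
        state: str        # "running", "success" or "error"
        progress: int     # percent complete while running
        error: str = ""

    def wait_for_task(task_id: str,
                      fetch_task_info: Callable[[str], TaskInfo],
                      poll_interval: float = 0.5) -> None:
        """Poll a long-running hypervisor task until it reaches a terminal state."""
        started = time.monotonic()
        while True:
            info = fetch_task_info(task_id)   # hypothetical "read task state" call
            if info.state == "success":
                # Mirrors "Task: {... 'duration_secs': X} completed successfully."
                print(f"Task {task_id} completed successfully "
                      f"in {time.monotonic() - started:.3f}s")
                return
            if info.state == "error":
                raise RuntimeError(f"Task {task_id} failed: {info.error}")
            # Mirrors the periodic "progress is N%." debug lines.
            print(f"Task {task_id} progress is {info.progress}%.")
            time.sleep(poll_interval)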
[ 1823.372639] env[62684]: DEBUG nova.compute.manager [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1823.373625] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d5b2141-2b37-4ff3-8dcf-a041c487a07c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.462468] env[62684]: DEBUG oslo_concurrency.lockutils [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Acquiring lock "26303c0e-be87-41ff-a15c-e92f91f8a05f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.462468] env[62684]: DEBUG oslo_concurrency.lockutils [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Lock "26303c0e-be87-41ff-a15c-e92f91f8a05f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.604534] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Creating linked-clone VM from snapshot {{(pid=62684) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1823.605092] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c2b42bf9-730e-40a9-b653-0ec5061b40b7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.613840] env[62684]: DEBUG oslo_vmware.api [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1823.613840] env[62684]: value = "task-2052582" [ 1823.613840] env[62684]: _type = "Task" [ 1823.613840] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.625333] env[62684]: DEBUG oslo_vmware.api [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052582, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.757192] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9934013-e761-4abc-be2d-8ec28ed70932 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.765795] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4f25e61-e59b-4ad0-9282-b65e3d7ebc87 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.801884] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e969a75c-39b2-4e15-b44d-f18575b332f9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.808194] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.808549] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "ca3d1a73-6f3b-4278-8fe7-03b66f407ba6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.808782] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "ca3d1a73-6f3b-4278-8fe7-03b66f407ba6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.814788] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d5f0f1-e55f-41c9-9c74-bbae2c918347 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.829965] env[62684]: DEBUG nova.compute.provider_tree [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1823.864950] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "6d4061e4-a074-445d-95c5-239014ee87f3" 
"released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.554s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1823.895023] env[62684]: INFO nova.compute.manager [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Took 41.07 seconds to build instance. [ 1823.948771] env[62684]: DEBUG nova.compute.manager [req-2941a95d-27af-438c-b598-db9700d57b1a req-cfba34b0-0d61-47ba-b451-5e244b072de8 service nova] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Received event network-vif-plugged-e6a11cfd-6369-4dd8-a41b-dd7ef4d49a87 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1823.949057] env[62684]: DEBUG oslo_concurrency.lockutils [req-2941a95d-27af-438c-b598-db9700d57b1a req-cfba34b0-0d61-47ba-b451-5e244b072de8 service nova] Acquiring lock "52839b18-a68a-4ec7-a921-c42454955e82-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.949314] env[62684]: DEBUG oslo_concurrency.lockutils [req-2941a95d-27af-438c-b598-db9700d57b1a req-cfba34b0-0d61-47ba-b451-5e244b072de8 service nova] Lock "52839b18-a68a-4ec7-a921-c42454955e82-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.949770] env[62684]: DEBUG oslo_concurrency.lockutils [req-2941a95d-27af-438c-b598-db9700d57b1a req-cfba34b0-0d61-47ba-b451-5e244b072de8 service nova] Lock "52839b18-a68a-4ec7-a921-c42454955e82-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1823.950042] env[62684]: DEBUG nova.compute.manager [req-2941a95d-27af-438c-b598-db9700d57b1a req-cfba34b0-0d61-47ba-b451-5e244b072de8 service nova] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] No waiting events found dispatching network-vif-plugged-e6a11cfd-6369-4dd8-a41b-dd7ef4d49a87 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1823.950233] env[62684]: WARNING nova.compute.manager [req-2941a95d-27af-438c-b598-db9700d57b1a req-cfba34b0-0d61-47ba-b451-5e244b072de8 service nova] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Received unexpected event network-vif-plugged-e6a11cfd-6369-4dd8-a41b-dd7ef4d49a87 for instance with vm_state building and task_state spawning. [ 1824.077169] env[62684]: DEBUG nova.network.neutron [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Successfully updated port: e6a11cfd-6369-4dd8-a41b-dd7ef4d49a87 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1824.127126] env[62684]: DEBUG oslo_vmware.api [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052582, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.351941] env[62684]: ERROR nova.scheduler.client.report [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [req-c5979b13-a8c8-4243-9202-0a51b05f1ef4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c5979b13-a8c8-4243-9202-0a51b05f1ef4"}]} [ 1824.358624] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "02dc8c41-5092-4f84-9722-37d4df3a459a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1824.358870] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "02dc8c41-5092-4f84-9722-37d4df3a459a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1824.367615] env[62684]: DEBUG nova.scheduler.client.report [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1824.369639] env[62684]: DEBUG nova.compute.manager [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1824.386126] env[62684]: DEBUG nova.scheduler.client.report [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1824.386418] env[62684]: DEBUG nova.compute.provider_tree [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1824.396591] env[62684]: DEBUG oslo_concurrency.lockutils [None req-856bbec3-9e29-4807-83b9-ad4ed9ff4e00 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Lock "3a172e9f-9f79-489e-9571-80bd74ad8609" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.524s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.399136] env[62684]: DEBUG nova.scheduler.client.report [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1824.417711] env[62684]: DEBUG nova.scheduler.client.report [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1824.580526] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "refresh_cache-52839b18-a68a-4ec7-a921-c42454955e82" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1824.580526] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 
tempest-MultipleCreateTestJSON-2097349723-project-member] Acquired lock "refresh_cache-52839b18-a68a-4ec7-a921-c42454955e82" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1824.580526] env[62684]: DEBUG nova.network.neutron [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1824.630836] env[62684]: DEBUG oslo_vmware.api [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052582, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.882680] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecff4b58-cfe2-4f54-a3cc-0980133ed479 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.891296] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3930b38-179b-40a5-ac46-a09f86a7016d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.896230] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1824.925524] env[62684]: DEBUG nova.compute.manager [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1824.928498] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d536ec6-71ba-447a-bebc-b2830b3a35e6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.937275] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b149ef-b243-4e55-9fb0-b626ac794e1c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.951897] env[62684]: DEBUG nova.compute.provider_tree [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1825.132914] env[62684]: DEBUG oslo_vmware.api [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052582, 'name': CloneVM_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.154010] env[62684]: DEBUG nova.network.neutron [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1825.449768] env[62684]: DEBUG nova.network.neutron [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Updating instance_info_cache with network_info: [{"id": "e6a11cfd-6369-4dd8-a41b-dd7ef4d49a87", "address": "fa:16:3e:9a:74:58", "network": {"id": "95ad5a29-9716-4b0c-937b-33a498c74ef7", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-342262548-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61c83953d09c4d1c97eee5a8679c30d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6a11cfd-63", "ovs_interfaceid": "e6a11cfd-6369-4dd8-a41b-dd7ef4d49a87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1825.455397] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1825.490096] env[62684]: DEBUG nova.scheduler.client.report [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 58 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1825.490741] env[62684]: DEBUG nova.compute.provider_tree [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 58 to 59 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1825.490741] env[62684]: DEBUG nova.compute.provider_tree [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: 
{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1825.633663] env[62684]: DEBUG oslo_vmware.api [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052582, 'name': CloneVM_Task, 'duration_secs': 1.606124} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.634805] env[62684]: INFO nova.virt.vmwareapi.vmops [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Created linked-clone VM from snapshot [ 1825.635800] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ddffaf-f60c-4827-860f-50468e4577b8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.643751] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Uploading image 01216b0e-c832-4bdf-bcff-de5e6e30665f {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1825.665843] env[62684]: DEBUG oslo_vmware.rw_handles [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1825.665843] env[62684]: value = "vm-421213" [ 1825.665843] env[62684]: _type = "VirtualMachine" [ 1825.665843] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1825.666169] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-a68b684f-05ff-4a7e-a579-7405b2e29dfa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.673149] env[62684]: DEBUG oslo_vmware.rw_handles [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Lease: (returnval){ [ 1825.673149] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b290b2-526e-6031-9c2c-59be7912dad9" [ 1825.673149] env[62684]: _type = "HttpNfcLease" [ 1825.673149] env[62684]: } obtained for exporting VM: (result){ [ 1825.673149] env[62684]: value = "vm-421213" [ 1825.673149] env[62684]: _type = "VirtualMachine" [ 1825.673149] env[62684]: }. 
{{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1825.673368] env[62684]: DEBUG oslo_vmware.api [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the lease: (returnval){ [ 1825.673368] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b290b2-526e-6031-9c2c-59be7912dad9" [ 1825.673368] env[62684]: _type = "HttpNfcLease" [ 1825.673368] env[62684]: } to be ready. {{(pid=62684) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1825.679912] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1825.679912] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b290b2-526e-6031-9c2c-59be7912dad9" [ 1825.679912] env[62684]: _type = "HttpNfcLease" [ 1825.679912] env[62684]: } is initializing. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1825.952846] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Releasing lock "refresh_cache-52839b18-a68a-4ec7-a921-c42454955e82" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1825.953109] env[62684]: DEBUG nova.compute.manager [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Instance network_info: |[{"id": "e6a11cfd-6369-4dd8-a41b-dd7ef4d49a87", "address": "fa:16:3e:9a:74:58", "network": {"id": "95ad5a29-9716-4b0c-937b-33a498c74ef7", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-342262548-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61c83953d09c4d1c97eee5a8679c30d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6a11cfd-63", "ovs_interfaceid": "e6a11cfd-6369-4dd8-a41b-dd7ef4d49a87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1825.953479] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:74:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c68b7663-4f0e-47f0-ac7f-40c6d952f7bb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e6a11cfd-6369-4dd8-a41b-dd7ef4d49a87', 'vif_model': 
'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1825.961084] env[62684]: DEBUG oslo.service.loopingcall [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1825.961320] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1825.961547] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-61e40f28-c157-47c9-8367-6c2dd26c76f4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.981236] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1825.981236] env[62684]: value = "task-2052584" [ 1825.981236] env[62684]: _type = "Task" [ 1825.981236] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.988928] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052584, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.996028] env[62684]: DEBUG oslo_concurrency.lockutils [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.073s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1825.996419] env[62684]: DEBUG nova.compute.manager [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1826.000173] env[62684]: DEBUG oslo_concurrency.lockutils [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.088s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1826.001090] env[62684]: INFO nova.compute.claims [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1826.013150] env[62684]: DEBUG nova.compute.manager [req-bad905c7-d4c2-40cf-a4df-a49fe0912e83 req-9cc3b877-49e3-413f-ab2b-47ad7450f593 service nova] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Received event network-changed-e6a11cfd-6369-4dd8-a41b-dd7ef4d49a87 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1826.013380] env[62684]: DEBUG nova.compute.manager [req-bad905c7-d4c2-40cf-a4df-a49fe0912e83 req-9cc3b877-49e3-413f-ab2b-47ad7450f593 service nova] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Refreshing instance network info cache due to event network-changed-e6a11cfd-6369-4dd8-a41b-dd7ef4d49a87. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1826.013640] env[62684]: DEBUG oslo_concurrency.lockutils [req-bad905c7-d4c2-40cf-a4df-a49fe0912e83 req-9cc3b877-49e3-413f-ab2b-47ad7450f593 service nova] Acquiring lock "refresh_cache-52839b18-a68a-4ec7-a921-c42454955e82" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1826.013791] env[62684]: DEBUG oslo_concurrency.lockutils [req-bad905c7-d4c2-40cf-a4df-a49fe0912e83 req-9cc3b877-49e3-413f-ab2b-47ad7450f593 service nova] Acquired lock "refresh_cache-52839b18-a68a-4ec7-a921-c42454955e82" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1826.013956] env[62684]: DEBUG nova.network.neutron [req-bad905c7-d4c2-40cf-a4df-a49fe0912e83 req-9cc3b877-49e3-413f-ab2b-47ad7450f593 service nova] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Refreshing network info cache for port e6a11cfd-6369-4dd8-a41b-dd7ef4d49a87 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1826.182324] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1826.182324] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b290b2-526e-6031-9c2c-59be7912dad9" [ 1826.182324] env[62684]: _type = "HttpNfcLease" [ 1826.182324] env[62684]: } is ready. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1826.182616] env[62684]: DEBUG oslo_vmware.rw_handles [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1826.182616] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b290b2-526e-6031-9c2c-59be7912dad9" [ 1826.182616] env[62684]: _type = "HttpNfcLease" [ 1826.182616] env[62684]: }. 
{{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1826.183352] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99aa82d9-f657-4da8-9ac9-00e09352a521 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.190491] env[62684]: DEBUG oslo_vmware.rw_handles [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bb9436-dd71-2af5-9c9a-6539cdecc7e2/disk-0.vmdk from lease info. {{(pid=62684) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1826.190673] env[62684]: DEBUG oslo_vmware.rw_handles [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bb9436-dd71-2af5-9c9a-6539cdecc7e2/disk-0.vmdk for reading. {{(pid=62684) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1826.342341] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7f901aac-6f70-420b-87c7-e3e837879129 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.491025] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052584, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.506029] env[62684]: DEBUG nova.compute.utils [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1826.509986] env[62684]: DEBUG nova.compute.manager [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1826.510174] env[62684]: DEBUG nova.network.neutron [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1826.598662] env[62684]: DEBUG nova.policy [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0c9327f7394249948899bf76e1837d36', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7855def9d0aa49abb7003ee504b9ccaf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1826.748159] env[62684]: DEBUG nova.network.neutron [req-bad905c7-d4c2-40cf-a4df-a49fe0912e83 req-9cc3b877-49e3-413f-ab2b-47ad7450f593 service nova] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Updated VIF entry in instance network info cache for port e6a11cfd-6369-4dd8-a41b-dd7ef4d49a87. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1826.748558] env[62684]: DEBUG nova.network.neutron [req-bad905c7-d4c2-40cf-a4df-a49fe0912e83 req-9cc3b877-49e3-413f-ab2b-47ad7450f593 service nova] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Updating instance_info_cache with network_info: [{"id": "e6a11cfd-6369-4dd8-a41b-dd7ef4d49a87", "address": "fa:16:3e:9a:74:58", "network": {"id": "95ad5a29-9716-4b0c-937b-33a498c74ef7", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-342262548-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61c83953d09c4d1c97eee5a8679c30d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6a11cfd-63", "ovs_interfaceid": "e6a11cfd-6369-4dd8-a41b-dd7ef4d49a87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1826.996672] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052584, 'name': CreateVM_Task, 'duration_secs': 0.685179} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.997093] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1826.997862] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1826.998116] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1826.998673] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1826.999571] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a27e962-a2ce-4c96-98b2-e2f8f0a3e249 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.007202] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1827.007202] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523f578d-c0e9-b105-8fb8-964b611b1d1b" [ 1827.007202] env[62684]: _type = "Task" [ 1827.007202] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.014307] env[62684]: DEBUG nova.compute.manager [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1827.022965] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523f578d-c0e9-b105-8fb8-964b611b1d1b, 'name': SearchDatastore_Task, 'duration_secs': 0.010695} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.023336] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1827.023643] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1827.023928] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1827.024267] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1827.024506] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1827.024833] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f31ee4a7-31cd-424d-ae7b-122f71726327 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.032881] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1827.033086] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1827.033829] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-071ce78b-97cb-4430-9ab0-788a09e015a6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.042353] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1827.042353] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ece9d5-a7ef-e33b-19cd-473f32e8e68d" [ 1827.042353] env[62684]: _type = "Task" [ 1827.042353] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.054579] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ece9d5-a7ef-e33b-19cd-473f32e8e68d, 'name': SearchDatastore_Task, 'duration_secs': 0.0109} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.055789] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d3d1534-159b-45a4-98b3-0ad6652e758c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.061796] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1827.061796] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52eb1f6b-d1f7-993a-2f45-3acab4b2a820" [ 1827.061796] env[62684]: _type = "Task" [ 1827.061796] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.075739] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52eb1f6b-d1f7-993a-2f45-3acab4b2a820, 'name': SearchDatastore_Task, 'duration_secs': 0.010931} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.076562] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1827.076562] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 52839b18-a68a-4ec7-a921-c42454955e82/52839b18-a68a-4ec7-a921-c42454955e82.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1827.076762] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d7420c76-2446-46a4-a754-730a5cb0274c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.087068] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1827.087068] env[62684]: value = "task-2052585" [ 1827.087068] env[62684]: _type = "Task" [ 1827.087068] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.095670] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052585, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.171678] env[62684]: DEBUG nova.network.neutron [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Successfully created port: fcd83399-5ab1-469a-9bbf-c150314f55dd {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1827.251373] env[62684]: DEBUG oslo_concurrency.lockutils [req-bad905c7-d4c2-40cf-a4df-a49fe0912e83 req-9cc3b877-49e3-413f-ab2b-47ad7450f593 service nova] Releasing lock "refresh_cache-52839b18-a68a-4ec7-a921-c42454955e82" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1827.251670] env[62684]: DEBUG nova.compute.manager [req-bad905c7-d4c2-40cf-a4df-a49fe0912e83 req-9cc3b877-49e3-413f-ab2b-47ad7450f593 service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Received event network-changed-8f6b3e69-1998-4808-9c1a-1224c8ab5363 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1827.251849] env[62684]: DEBUG nova.compute.manager [req-bad905c7-d4c2-40cf-a4df-a49fe0912e83 req-9cc3b877-49e3-413f-ab2b-47ad7450f593 service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Refreshing instance network info cache due to event network-changed-8f6b3e69-1998-4808-9c1a-1224c8ab5363. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1827.252094] env[62684]: DEBUG oslo_concurrency.lockutils [req-bad905c7-d4c2-40cf-a4df-a49fe0912e83 req-9cc3b877-49e3-413f-ab2b-47ad7450f593 service nova] Acquiring lock "refresh_cache-6b1f0e69-3915-40dc-b4ec-93ab174f12b6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1827.252248] env[62684]: DEBUG oslo_concurrency.lockutils [req-bad905c7-d4c2-40cf-a4df-a49fe0912e83 req-9cc3b877-49e3-413f-ab2b-47ad7450f593 service nova] Acquired lock "refresh_cache-6b1f0e69-3915-40dc-b4ec-93ab174f12b6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1827.252415] env[62684]: DEBUG nova.network.neutron [req-bad905c7-d4c2-40cf-a4df-a49fe0912e83 req-9cc3b877-49e3-413f-ab2b-47ad7450f593 service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Refreshing network info cache for port 8f6b3e69-1998-4808-9c1a-1224c8ab5363 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1827.601482] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052585, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.692153] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36c7d5e9-69b6-4aea-8f0c-9f13fd44d363 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.699401] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddea5886-0c14-41c6-8352-50c3fee48715 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.732174] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5262ca77-6b1b-4eb7-8a59-6cb04765295c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.741929] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b74e2233-d690-4f3d-b7a6-a0e75874285c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.762615] env[62684]: DEBUG nova.compute.provider_tree [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1828.024395] env[62684]: DEBUG nova.network.neutron [req-bad905c7-d4c2-40cf-a4df-a49fe0912e83 req-9cc3b877-49e3-413f-ab2b-47ad7450f593 service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Updated VIF entry in instance network info cache for port 8f6b3e69-1998-4808-9c1a-1224c8ab5363. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1828.024663] env[62684]: DEBUG nova.network.neutron [req-bad905c7-d4c2-40cf-a4df-a49fe0912e83 req-9cc3b877-49e3-413f-ab2b-47ad7450f593 service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Updating instance_info_cache with network_info: [{"id": "8f6b3e69-1998-4808-9c1a-1224c8ab5363", "address": "fa:16:3e:f0:98:53", "network": {"id": "c4f4fd23-e4d0-4e7a-861b-71eeb3008ae5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1003879390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e169ffb4120143dca6d67108986e62f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f6b3e69-19", "ovs_interfaceid": "8f6b3e69-1998-4808-9c1a-1224c8ab5363", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1828.027380] env[62684]: DEBUG nova.compute.manager [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1828.061769] env[62684]: DEBUG nova.virt.hardware [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1828.062009] env[62684]: DEBUG nova.virt.hardware [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1828.062570] env[62684]: DEBUG nova.virt.hardware [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1828.062778] env[62684]: DEBUG nova.virt.hardware [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1828.062933] env[62684]: DEBUG nova.virt.hardware [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1828.063098] env[62684]: DEBUG nova.virt.hardware [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1828.063323] env[62684]: DEBUG nova.virt.hardware [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1828.063496] env[62684]: DEBUG nova.virt.hardware [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1828.063777] env[62684]: DEBUG nova.virt.hardware [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Got 1 
possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1828.064034] env[62684]: DEBUG nova.virt.hardware [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1828.064507] env[62684]: DEBUG nova.virt.hardware [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1828.065509] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-922e801b-1361-4b3e-af2e-fad9b01cc43a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.077408] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ef0687a-f78b-4ead-aa52-07827cdeddb2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.104923] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052585, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.559385} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.106780] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 52839b18-a68a-4ec7-a921-c42454955e82/52839b18-a68a-4ec7-a921-c42454955e82.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1828.107128] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1828.107930] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-22df69f2-6a6a-4f82-aae8-19a4c4aa5754 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.115393] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1828.115393] env[62684]: value = "task-2052586" [ 1828.115393] env[62684]: _type = "Task" [ 1828.115393] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.123857] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052586, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.266615] env[62684]: DEBUG nova.scheduler.client.report [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1828.432430] env[62684]: DEBUG nova.compute.manager [req-d5239466-d7a0-41c8-b4dd-83550a9c3ede req-07b76272-c30b-4e2f-81e3-d7267023d1a8 service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Received event network-changed-8f6b3e69-1998-4808-9c1a-1224c8ab5363 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1828.432430] env[62684]: DEBUG nova.compute.manager [req-d5239466-d7a0-41c8-b4dd-83550a9c3ede req-07b76272-c30b-4e2f-81e3-d7267023d1a8 service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Refreshing instance network info cache due to event network-changed-8f6b3e69-1998-4808-9c1a-1224c8ab5363. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1828.432430] env[62684]: DEBUG oslo_concurrency.lockutils [req-d5239466-d7a0-41c8-b4dd-83550a9c3ede req-07b76272-c30b-4e2f-81e3-d7267023d1a8 service nova] Acquiring lock "refresh_cache-6b1f0e69-3915-40dc-b4ec-93ab174f12b6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1828.532514] env[62684]: DEBUG oslo_concurrency.lockutils [req-bad905c7-d4c2-40cf-a4df-a49fe0912e83 req-9cc3b877-49e3-413f-ab2b-47ad7450f593 service nova] Releasing lock "refresh_cache-6b1f0e69-3915-40dc-b4ec-93ab174f12b6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1828.532992] env[62684]: DEBUG oslo_concurrency.lockutils [req-d5239466-d7a0-41c8-b4dd-83550a9c3ede req-07b76272-c30b-4e2f-81e3-d7267023d1a8 service nova] Acquired lock "refresh_cache-6b1f0e69-3915-40dc-b4ec-93ab174f12b6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1828.533262] env[62684]: DEBUG nova.network.neutron [req-d5239466-d7a0-41c8-b4dd-83550a9c3ede req-07b76272-c30b-4e2f-81e3-d7267023d1a8 service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Refreshing network info cache for port 8f6b3e69-1998-4808-9c1a-1224c8ab5363 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1828.626831] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052586, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.156193} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.627099] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1828.627906] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e977384-b769-4cd2-a860-30b0e4b972bf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.652603] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Reconfiguring VM instance instance-0000001f to attach disk [datastore2] 52839b18-a68a-4ec7-a921-c42454955e82/52839b18-a68a-4ec7-a921-c42454955e82.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1828.652986] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6cd23926-57ab-49cc-91d9-b8dabcaac57b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.675020] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1828.675020] env[62684]: value = "task-2052587" 
[ 1828.675020] env[62684]: _type = "Task" [ 1828.675020] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.683894] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052587, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.773195] env[62684]: DEBUG oslo_concurrency.lockutils [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.774s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.773782] env[62684]: DEBUG nova.compute.manager [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1828.776930] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.780s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.778618] env[62684]: INFO nova.compute.claims [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1829.185362] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052587, 'name': ReconfigVM_Task, 'duration_secs': 0.481976} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.185723] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Reconfigured VM instance instance-0000001f to attach disk [datastore2] 52839b18-a68a-4ec7-a921-c42454955e82/52839b18-a68a-4ec7-a921-c42454955e82.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1829.186557] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-04469308-67d6-496d-9169-d1a301fca801 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.194861] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1829.194861] env[62684]: value = "task-2052588" [ 1829.194861] env[62684]: _type = "Task" [ 1829.194861] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.214164] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052588, 'name': Rename_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.283581] env[62684]: DEBUG nova.compute.utils [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1829.285679] env[62684]: DEBUG nova.network.neutron [req-d5239466-d7a0-41c8-b4dd-83550a9c3ede req-07b76272-c30b-4e2f-81e3-d7267023d1a8 service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Updated VIF entry in instance network info cache for port 8f6b3e69-1998-4808-9c1a-1224c8ab5363. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1829.285998] env[62684]: DEBUG nova.network.neutron [req-d5239466-d7a0-41c8-b4dd-83550a9c3ede req-07b76272-c30b-4e2f-81e3-d7267023d1a8 service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Updating instance_info_cache with network_info: [{"id": "8f6b3e69-1998-4808-9c1a-1224c8ab5363", "address": "fa:16:3e:f0:98:53", "network": {"id": "c4f4fd23-e4d0-4e7a-861b-71eeb3008ae5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1003879390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e169ffb4120143dca6d67108986e62f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f6b3e69-19", "ovs_interfaceid": "8f6b3e69-1998-4808-9c1a-1224c8ab5363", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1829.290174] env[62684]: DEBUG nova.compute.manager [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1829.290394] env[62684]: DEBUG nova.network.neutron [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1829.299598] env[62684]: DEBUG nova.compute.manager [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1829.363268] env[62684]: DEBUG nova.policy [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1fba7614b20e4e6280af728ba1dc6fe3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bcd3c908bccd421292836d1cde1fc5e3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1829.611780] env[62684]: DEBUG nova.network.neutron [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Successfully updated port: fcd83399-5ab1-469a-9bbf-c150314f55dd {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1829.649878] env[62684]: DEBUG nova.network.neutron [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Successfully created port: 6f9879b5-4fa4-4afb-a3a5-8ee16a495f70 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1829.706081] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052588, 'name': Rename_Task, 'duration_secs': 0.233269} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.706475] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1829.706758] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bc4a37e3-0c78-4bbe-860d-8dbb168704d6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.714779] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1829.714779] env[62684]: value = "task-2052589" [ 1829.714779] env[62684]: _type = "Task" [ 1829.714779] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.724301] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052589, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.798675] env[62684]: DEBUG oslo_concurrency.lockutils [req-d5239466-d7a0-41c8-b4dd-83550a9c3ede req-07b76272-c30b-4e2f-81e3-d7267023d1a8 service nova] Releasing lock "refresh_cache-6b1f0e69-3915-40dc-b4ec-93ab174f12b6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1829.798992] env[62684]: DEBUG nova.compute.manager [req-d5239466-d7a0-41c8-b4dd-83550a9c3ede req-07b76272-c30b-4e2f-81e3-d7267023d1a8 service nova] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Received event network-changed-1ce065ea-4c6e-4c34-8b7f-27a3b14d0924 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1829.799187] env[62684]: DEBUG nova.compute.manager [req-d5239466-d7a0-41c8-b4dd-83550a9c3ede req-07b76272-c30b-4e2f-81e3-d7267023d1a8 service nova] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Refreshing instance network info cache due to event network-changed-1ce065ea-4c6e-4c34-8b7f-27a3b14d0924. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1829.799414] env[62684]: DEBUG oslo_concurrency.lockutils [req-d5239466-d7a0-41c8-b4dd-83550a9c3ede req-07b76272-c30b-4e2f-81e3-d7267023d1a8 service nova] Acquiring lock "refresh_cache-3a172e9f-9f79-489e-9571-80bd74ad8609" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1829.799564] env[62684]: DEBUG oslo_concurrency.lockutils [req-d5239466-d7a0-41c8-b4dd-83550a9c3ede req-07b76272-c30b-4e2f-81e3-d7267023d1a8 service nova] Acquired lock "refresh_cache-3a172e9f-9f79-489e-9571-80bd74ad8609" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1829.799945] env[62684]: DEBUG nova.network.neutron [req-d5239466-d7a0-41c8-b4dd-83550a9c3ede req-07b76272-c30b-4e2f-81e3-d7267023d1a8 service nova] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Refreshing network info cache for port 1ce065ea-4c6e-4c34-8b7f-27a3b14d0924 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1830.124130] env[62684]: DEBUG oslo_concurrency.lockutils [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "refresh_cache-e08f8636-5193-40fa-972c-f0ecab193fc1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1830.125900] env[62684]: DEBUG oslo_concurrency.lockutils [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquired lock "refresh_cache-e08f8636-5193-40fa-972c-f0ecab193fc1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1830.125900] env[62684]: DEBUG nova.network.neutron [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1830.226614] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052589, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.321036] env[62684]: DEBUG nova.compute.manager [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1830.349021] env[62684]: DEBUG nova.virt.hardware [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1830.349021] env[62684]: DEBUG nova.virt.hardware [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1830.349021] env[62684]: DEBUG nova.virt.hardware [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1830.349262] env[62684]: DEBUG nova.virt.hardware [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1830.349554] env[62684]: DEBUG nova.virt.hardware [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1830.349848] env[62684]: DEBUG nova.virt.hardware [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1830.350201] env[62684]: DEBUG nova.virt.hardware [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1830.350499] env[62684]: DEBUG nova.virt.hardware [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1830.353024] env[62684]: DEBUG nova.virt.hardware [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1830.353024] env[62684]: DEBUG nova.virt.hardware [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1830.353024] env[62684]: DEBUG nova.virt.hardware [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1830.353024] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee077c1b-34ef-44a5-9c7d-734dcb1f48fa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.361412] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a7b64dd-2e0e-4092-b825-f5082754cdf1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.593160] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b27b238-e222-450c-85bf-4c7d2d97d25c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.601539] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7488b39-f9c6-45e1-902c-12a9c0c5ebc8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.635146] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c449f371-97c0-489a-b51a-69610d533fc3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.643370] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc8e40d-bdbd-419d-91c5-ff1bdd2eab46 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.657939] env[62684]: DEBUG nova.compute.provider_tree [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
1830.693832] env[62684]: DEBUG nova.network.neutron [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1830.727893] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052589, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.782534] env[62684]: DEBUG nova.network.neutron [req-d5239466-d7a0-41c8-b4dd-83550a9c3ede req-07b76272-c30b-4e2f-81e3-d7267023d1a8 service nova] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Updated VIF entry in instance network info cache for port 1ce065ea-4c6e-4c34-8b7f-27a3b14d0924. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1830.783114] env[62684]: DEBUG nova.network.neutron [req-d5239466-d7a0-41c8-b4dd-83550a9c3ede req-07b76272-c30b-4e2f-81e3-d7267023d1a8 service nova] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Updating instance_info_cache with network_info: [{"id": "1ce065ea-4c6e-4c34-8b7f-27a3b14d0924", "address": "fa:16:3e:10:ec:c4", "network": {"id": "c4f4fd23-e4d0-4e7a-861b-71eeb3008ae5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1003879390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e169ffb4120143dca6d67108986e62f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ce065ea-4c", "ovs_interfaceid": "1ce065ea-4c6e-4c34-8b7f-27a3b14d0924", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1830.796390] env[62684]: DEBUG nova.compute.manager [req-ad8398d2-fbdb-43fb-b828-e7e5880ad19c req-08799e3a-528c-4f82-94e0-12707a30a934 service nova] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Received event network-vif-plugged-fcd83399-5ab1-469a-9bbf-c150314f55dd {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1830.796390] env[62684]: DEBUG oslo_concurrency.lockutils [req-ad8398d2-fbdb-43fb-b828-e7e5880ad19c req-08799e3a-528c-4f82-94e0-12707a30a934 service nova] Acquiring lock "e08f8636-5193-40fa-972c-f0ecab193fc1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1830.796390] env[62684]: DEBUG oslo_concurrency.lockutils [req-ad8398d2-fbdb-43fb-b828-e7e5880ad19c req-08799e3a-528c-4f82-94e0-12707a30a934 service nova] Lock 
"e08f8636-5193-40fa-972c-f0ecab193fc1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1830.796390] env[62684]: DEBUG oslo_concurrency.lockutils [req-ad8398d2-fbdb-43fb-b828-e7e5880ad19c req-08799e3a-528c-4f82-94e0-12707a30a934 service nova] Lock "e08f8636-5193-40fa-972c-f0ecab193fc1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1830.796390] env[62684]: DEBUG nova.compute.manager [req-ad8398d2-fbdb-43fb-b828-e7e5880ad19c req-08799e3a-528c-4f82-94e0-12707a30a934 service nova] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] No waiting events found dispatching network-vif-plugged-fcd83399-5ab1-469a-9bbf-c150314f55dd {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1830.796660] env[62684]: WARNING nova.compute.manager [req-ad8398d2-fbdb-43fb-b828-e7e5880ad19c req-08799e3a-528c-4f82-94e0-12707a30a934 service nova] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Received unexpected event network-vif-plugged-fcd83399-5ab1-469a-9bbf-c150314f55dd for instance with vm_state building and task_state spawning. [ 1830.796660] env[62684]: DEBUG nova.compute.manager [req-ad8398d2-fbdb-43fb-b828-e7e5880ad19c req-08799e3a-528c-4f82-94e0-12707a30a934 service nova] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Received event network-changed-fcd83399-5ab1-469a-9bbf-c150314f55dd {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1830.796660] env[62684]: DEBUG nova.compute.manager [req-ad8398d2-fbdb-43fb-b828-e7e5880ad19c req-08799e3a-528c-4f82-94e0-12707a30a934 service nova] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Refreshing instance network info cache due to event network-changed-fcd83399-5ab1-469a-9bbf-c150314f55dd. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1830.796660] env[62684]: DEBUG oslo_concurrency.lockutils [req-ad8398d2-fbdb-43fb-b828-e7e5880ad19c req-08799e3a-528c-4f82-94e0-12707a30a934 service nova] Acquiring lock "refresh_cache-e08f8636-5193-40fa-972c-f0ecab193fc1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1831.012705] env[62684]: DEBUG nova.network.neutron [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Updating instance_info_cache with network_info: [{"id": "fcd83399-5ab1-469a-9bbf-c150314f55dd", "address": "fa:16:3e:60:96:30", "network": {"id": "2fa98fa4-ff7c-44e6-add0-693f55fd4b03", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2019954029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7855def9d0aa49abb7003ee504b9ccaf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcd83399-5a", "ovs_interfaceid": "fcd83399-5ab1-469a-9bbf-c150314f55dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1831.166495] env[62684]: DEBUG nova.scheduler.client.report [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1831.177633] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Acquiring lock "3a172e9f-9f79-489e-9571-80bd74ad8609" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1831.177633] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Lock "3a172e9f-9f79-489e-9571-80bd74ad8609" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 
0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1831.177633] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Acquiring lock "3a172e9f-9f79-489e-9571-80bd74ad8609-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1831.177633] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Lock "3a172e9f-9f79-489e-9571-80bd74ad8609-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1831.177882] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Lock "3a172e9f-9f79-489e-9571-80bd74ad8609-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1831.183403] env[62684]: INFO nova.compute.manager [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Terminating instance [ 1831.184443] env[62684]: DEBUG nova.compute.manager [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Start destroying the instance on the hypervisor. 
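The acquire / "waited" / "released" triples in the lock entries above are produced by oslo_concurrency's lockutils helpers. A minimal, self-contained sketch of the same pattern (the lock names and function below are placeholders, not Nova's actual code):

# Minimal sketch of the oslo_concurrency locking pattern seen in these entries.
# The lock names are placeholders, not real Nova lock names.
from oslo_concurrency import lockutils

@lockutils.synchronized("instance-uuid")
def do_terminate_instance():
    # Body runs while the named lock is held; lockutils emits the
    # 'acquired by ... waited N s' and '"released" ... held N s' debug lines.
    pass

# The same kind of lock can also be taken explicitly as a context manager:
with lockutils.lock("instance-uuid-events"):
    pass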
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1831.187018] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1831.187018] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f788c71-93d1-416e-b073-1572d4a6991f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.195085] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1831.195085] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2ac461c0-ffcc-49ed-b98e-8a618e2f1c76 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.205758] env[62684]: DEBUG nova.network.neutron [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Successfully updated port: 6f9879b5-4fa4-4afb-a3a5-8ee16a495f70 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1831.209455] env[62684]: DEBUG oslo_vmware.api [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Waiting for the task: (returnval){ [ 1831.209455] env[62684]: value = "task-2052590" [ 1831.209455] env[62684]: _type = "Task" [ 1831.209455] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.222432] env[62684]: DEBUG oslo_vmware.api [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052590, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.234245] env[62684]: DEBUG oslo_vmware.api [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052589, 'name': PowerOnVM_Task, 'duration_secs': 1.22797} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.235071] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1831.235071] env[62684]: INFO nova.compute.manager [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Took 8.28 seconds to spawn the instance on the hypervisor. [ 1831.235071] env[62684]: DEBUG nova.compute.manager [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1831.236049] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8275abc4-0972-4300-a674-55a6e96765cf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.286506] env[62684]: DEBUG oslo_concurrency.lockutils [req-d5239466-d7a0-41c8-b4dd-83550a9c3ede req-07b76272-c30b-4e2f-81e3-d7267023d1a8 service nova] Releasing lock "refresh_cache-3a172e9f-9f79-489e-9571-80bd74ad8609" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1831.289272] env[62684]: DEBUG nova.compute.manager [req-e7c7dd2e-3a98-4e4f-a124-90a1819c749b req-b99a3d60-a432-4888-be51-fb352f0d5293 service nova] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Received event network-vif-plugged-6f9879b5-4fa4-4afb-a3a5-8ee16a495f70 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1831.289531] env[62684]: DEBUG oslo_concurrency.lockutils [req-e7c7dd2e-3a98-4e4f-a124-90a1819c749b req-b99a3d60-a432-4888-be51-fb352f0d5293 service nova] Acquiring lock "b788c51b-367b-4eef-93d2-faa8836469b6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1831.289752] env[62684]: DEBUG oslo_concurrency.lockutils [req-e7c7dd2e-3a98-4e4f-a124-90a1819c749b req-b99a3d60-a432-4888-be51-fb352f0d5293 service nova] Lock "b788c51b-367b-4eef-93d2-faa8836469b6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1831.289939] env[62684]: DEBUG oslo_concurrency.lockutils [req-e7c7dd2e-3a98-4e4f-a124-90a1819c749b req-b99a3d60-a432-4888-be51-fb352f0d5293 service nova] Lock "b788c51b-367b-4eef-93d2-faa8836469b6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1831.290108] env[62684]: DEBUG nova.compute.manager [req-e7c7dd2e-3a98-4e4f-a124-90a1819c749b req-b99a3d60-a432-4888-be51-fb352f0d5293 service nova] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] No waiting events found dispatching 
network-vif-plugged-6f9879b5-4fa4-4afb-a3a5-8ee16a495f70 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1831.290275] env[62684]: WARNING nova.compute.manager [req-e7c7dd2e-3a98-4e4f-a124-90a1819c749b req-b99a3d60-a432-4888-be51-fb352f0d5293 service nova] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Received unexpected event network-vif-plugged-6f9879b5-4fa4-4afb-a3a5-8ee16a495f70 for instance with vm_state building and task_state spawning. [ 1831.515812] env[62684]: DEBUG oslo_concurrency.lockutils [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Releasing lock "refresh_cache-e08f8636-5193-40fa-972c-f0ecab193fc1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1831.516178] env[62684]: DEBUG nova.compute.manager [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Instance network_info: |[{"id": "fcd83399-5ab1-469a-9bbf-c150314f55dd", "address": "fa:16:3e:60:96:30", "network": {"id": "2fa98fa4-ff7c-44e6-add0-693f55fd4b03", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2019954029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7855def9d0aa49abb7003ee504b9ccaf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcd83399-5a", "ovs_interfaceid": "fcd83399-5ab1-469a-9bbf-c150314f55dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1831.516505] env[62684]: DEBUG oslo_concurrency.lockutils [req-ad8398d2-fbdb-43fb-b828-e7e5880ad19c req-08799e3a-528c-4f82-94e0-12707a30a934 service nova] Acquired lock "refresh_cache-e08f8636-5193-40fa-972c-f0ecab193fc1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1831.516688] env[62684]: DEBUG nova.network.neutron [req-ad8398d2-fbdb-43fb-b828-e7e5880ad19c req-08799e3a-528c-4f82-94e0-12707a30a934 service nova] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Refreshing network info cache for port fcd83399-5ab1-469a-9bbf-c150314f55dd {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1831.518108] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:96:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'fcd83399-5ab1-469a-9bbf-c150314f55dd', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1831.526825] env[62684]: DEBUG oslo.service.loopingcall [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1831.527915] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1831.528212] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d7087b00-940b-4a0d-96f7-7a1921228d42 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.550493] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1831.550493] env[62684]: value = "task-2052591" [ 1831.550493] env[62684]: _type = "Task" [ 1831.550493] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.559699] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052591, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.670035] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.893s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1831.670882] env[62684]: DEBUG nova.compute.manager [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Start building networks asynchronously for instance. 
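The "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" entry above is logged from oslo.service's loopingcall module. A rough sketch of that module's polling API, using FixedIntervalLoopingCall for illustration (the exact helper Nova uses here may differ, and the callback and interval are illustrative only):

# Rough sketch of oslo.service's looping-call API.
from oslo_service import loopingcall

def _poll():
    # Raising LoopingCallDone stops the loop and hands back a result.
    raise loopingcall.LoopingCallDone(retvalue="done")

timer = loopingcall.FixedIntervalLoopingCall(_poll)
result = timer.start(interval=0.5).wait()   # blocks until LoopingCallDone
print(result)                               # -> "done"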
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1831.673655] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.794s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1831.675250] env[62684]: INFO nova.compute.claims [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1831.712733] env[62684]: DEBUG oslo_concurrency.lockutils [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Acquiring lock "refresh_cache-b788c51b-367b-4eef-93d2-faa8836469b6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1831.712733] env[62684]: DEBUG oslo_concurrency.lockutils [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Acquired lock "refresh_cache-b788c51b-367b-4eef-93d2-faa8836469b6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1831.712892] env[62684]: DEBUG nova.network.neutron [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1831.719153] env[62684]: DEBUG oslo_vmware.api [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052590, 'name': PowerOffVM_Task, 'duration_secs': 0.245466} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.719153] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1831.719153] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1831.719750] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f0af74db-433c-44fd-b220-1cd28071b859 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.757706] env[62684]: INFO nova.compute.manager [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Took 39.62 seconds to build instance. [ 1831.796162] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1831.796461] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1831.796661] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Deleting the datastore file [datastore1] 3a172e9f-9f79-489e-9571-80bd74ad8609 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1831.796957] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-651e6200-2ede-484c-a279-093381dcc1a4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.804834] env[62684]: DEBUG oslo_vmware.api [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Waiting for the task: (returnval){ [ 1831.804834] env[62684]: value = "task-2052593" [ 1831.804834] env[62684]: _type = "Task" [ 1831.804834] env[62684]: } to complete. 
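The "Waiting for the task ... to complete" blocks and the _poll_task progress entries around them are oslo.vmware's task-wait loop. A hedged sketch of driving that loop directly (the vCenter host, credentials and VM reference below are placeholders):

# Hedged sketch of the oslo.vmware session/task pattern behind these entries;
# the host, credentials and managed-object reference are placeholders.
from oslo_vmware import api

session = api.VMwareAPISession("vc.example.test", "user", "secret",
                               api_retry_count=10, task_poll_interval=0.5)
vm_ref = None  # placeholder for a VirtualMachine managed-object reference
# invoke_api() issues the SOAP call (e.g. PowerOffVM_Task) and returns a task
# reference; wait_for_task() polls it until it succeeds or raises on error.
task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
session.wait_for_task(task)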
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.814917] env[62684]: DEBUG oslo_vmware.api [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052593, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.064157] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052591, 'name': CreateVM_Task, 'duration_secs': 0.468609} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.064350] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1832.065111] env[62684]: DEBUG oslo_concurrency.lockutils [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1832.065329] env[62684]: DEBUG oslo_concurrency.lockutils [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1832.065679] env[62684]: DEBUG oslo_concurrency.lockutils [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1832.066461] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d31d9c99-8060-42e9-8650-2bdb846f8fc5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.071172] env[62684]: DEBUG oslo_vmware.api [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1832.071172] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52868072-e663-7c24-12c2-e577448d339b" [ 1832.071172] env[62684]: _type = "Task" [ 1832.071172] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.082656] env[62684]: DEBUG oslo_vmware.api [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52868072-e663-7c24-12c2-e577448d339b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.180262] env[62684]: DEBUG nova.compute.utils [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1832.191309] env[62684]: DEBUG nova.compute.manager [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1832.191499] env[62684]: DEBUG nova.network.neutron [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1832.258548] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6d1955ef-ab8f-4fb4-bed7-64b45ac8de04 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "52839b18-a68a-4ec7-a921-c42454955e82" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.903s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1832.272184] env[62684]: DEBUG nova.network.neutron [req-ad8398d2-fbdb-43fb-b828-e7e5880ad19c req-08799e3a-528c-4f82-94e0-12707a30a934 service nova] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Updated VIF entry in instance network info cache for port fcd83399-5ab1-469a-9bbf-c150314f55dd. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1832.272580] env[62684]: DEBUG nova.network.neutron [req-ad8398d2-fbdb-43fb-b828-e7e5880ad19c req-08799e3a-528c-4f82-94e0-12707a30a934 service nova] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Updating instance_info_cache with network_info: [{"id": "fcd83399-5ab1-469a-9bbf-c150314f55dd", "address": "fa:16:3e:60:96:30", "network": {"id": "2fa98fa4-ff7c-44e6-add0-693f55fd4b03", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2019954029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7855def9d0aa49abb7003ee504b9ccaf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcd83399-5a", "ovs_interfaceid": "fcd83399-5ab1-469a-9bbf-c150314f55dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1832.288537] env[62684]: DEBUG nova.network.neutron [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1832.324347] env[62684]: DEBUG oslo_vmware.api [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052593, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.215938} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.327899] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1832.328275] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1832.328592] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1832.328887] env[62684]: INFO nova.compute.manager [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1832.329338] env[62684]: DEBUG oslo.service.loopingcall [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1832.331236] env[62684]: DEBUG nova.policy [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7a41dbd8c4af41c8bf7a051942b5633a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '001dd1d6b30145b69e13ef73a58be713', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1832.333129] env[62684]: DEBUG nova.compute.manager [-] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1832.333265] env[62684]: DEBUG nova.network.neutron [-] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1832.591099] env[62684]: DEBUG oslo_vmware.api [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52868072-e663-7c24-12c2-e577448d339b, 'name': SearchDatastore_Task, 'duration_secs': 0.012623} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.591425] env[62684]: DEBUG oslo_concurrency.lockutils [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1832.591671] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1832.591913] env[62684]: DEBUG oslo_concurrency.lockutils [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1832.592099] env[62684]: DEBUG oslo_concurrency.lockutils [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1832.592990] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1832.592990] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-449c32d0-9aa6-4cb3-8837-be158cfa7e9f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.602132] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1832.602390] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1832.605892] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-032ebe7e-3bcd-4770-99eb-30007b083203 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.611919] env[62684]: DEBUG oslo_vmware.api [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1832.611919] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529f01da-d2cc-9b27-e7cb-fce15e120cf8" [ 1832.611919] env[62684]: _type = "Task" [ 1832.611919] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.623333] env[62684]: DEBUG oslo_vmware.api [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529f01da-d2cc-9b27-e7cb-fce15e120cf8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.692773] env[62684]: DEBUG nova.compute.manager [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1832.699578] env[62684]: DEBUG nova.network.neutron [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Updating instance_info_cache with network_info: [{"id": "6f9879b5-4fa4-4afb-a3a5-8ee16a495f70", "address": "fa:16:3e:e8:c1:b8", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.233", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f9879b5-4f", "ovs_interfaceid": "6f9879b5-4fa4-4afb-a3a5-8ee16a495f70", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1832.762153] env[62684]: DEBUG nova.compute.manager [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1832.775492] env[62684]: DEBUG oslo_concurrency.lockutils [req-ad8398d2-fbdb-43fb-b828-e7e5880ad19c req-08799e3a-528c-4f82-94e0-12707a30a934 service nova] Releasing lock "refresh_cache-e08f8636-5193-40fa-972c-f0ecab193fc1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1832.775846] env[62684]: DEBUG nova.compute.manager [req-ad8398d2-fbdb-43fb-b828-e7e5880ad19c req-08799e3a-528c-4f82-94e0-12707a30a934 service nova] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Received event network-changed-1ce065ea-4c6e-4c34-8b7f-27a3b14d0924 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1832.776040] env[62684]: DEBUG nova.compute.manager [req-ad8398d2-fbdb-43fb-b828-e7e5880ad19c req-08799e3a-528c-4f82-94e0-12707a30a934 service nova] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Refreshing instance network info cache due to event network-changed-1ce065ea-4c6e-4c34-8b7f-27a3b14d0924. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1832.776286] env[62684]: DEBUG oslo_concurrency.lockutils [req-ad8398d2-fbdb-43fb-b828-e7e5880ad19c req-08799e3a-528c-4f82-94e0-12707a30a934 service nova] Acquiring lock "refresh_cache-3a172e9f-9f79-489e-9571-80bd74ad8609" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1832.776402] env[62684]: DEBUG oslo_concurrency.lockutils [req-ad8398d2-fbdb-43fb-b828-e7e5880ad19c req-08799e3a-528c-4f82-94e0-12707a30a934 service nova] Acquired lock "refresh_cache-3a172e9f-9f79-489e-9571-80bd74ad8609" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1832.776780] env[62684]: DEBUG nova.network.neutron [req-ad8398d2-fbdb-43fb-b828-e7e5880ad19c req-08799e3a-528c-4f82-94e0-12707a30a934 service nova] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Refreshing network info cache for port 1ce065ea-4c6e-4c34-8b7f-27a3b14d0924 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1833.110932] env[62684]: DEBUG nova.network.neutron [-] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1833.127519] env[62684]: DEBUG oslo_vmware.api [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529f01da-d2cc-9b27-e7cb-fce15e120cf8, 'name': SearchDatastore_Task, 'duration_secs': 0.011531} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.128415] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c9de448-8b25-4426-a42e-c94570f8b76f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.137393] env[62684]: DEBUG oslo_vmware.api [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1833.137393] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523efdb8-0d5c-26f8-1155-a7f82fb9321c" [ 1833.137393] env[62684]: _type = "Task" [ 1833.137393] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.149795] env[62684]: DEBUG oslo_vmware.api [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523efdb8-0d5c-26f8-1155-a7f82fb9321c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.217913] env[62684]: DEBUG oslo_concurrency.lockutils [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Releasing lock "refresh_cache-b788c51b-367b-4eef-93d2-faa8836469b6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1833.218310] env[62684]: DEBUG nova.compute.manager [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Instance network_info: |[{"id": "6f9879b5-4fa4-4afb-a3a5-8ee16a495f70", "address": "fa:16:3e:e8:c1:b8", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.233", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f9879b5-4f", "ovs_interfaceid": "6f9879b5-4fa4-4afb-a3a5-8ee16a495f70", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1833.225563] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:e8:c1:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ded18042-834c-4792-b3e8-b1c377446432', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6f9879b5-4fa4-4afb-a3a5-8ee16a495f70', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1833.235123] env[62684]: DEBUG oslo.service.loopingcall [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1833.235659] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1833.237521] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-493b9e0a-4cac-4469-86b1-e621b6cf3937 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.261032] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1833.261032] env[62684]: value = "task-2052594" [ 1833.261032] env[62684]: _type = "Task" [ 1833.261032] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.272448] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052594, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.288636] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55aac63b-f104-456e-83d8-ec7856413154 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.292117] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.297485] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4681df53-ec0e-4b78-b18d-c015f9a39eca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.329943] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7593ef67-9949-4fef-a70c-391b54621b98 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.338531] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4805a1a7-497d-4a21-b6fe-93bd4bf806c7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.354866] env[62684]: DEBUG nova.compute.provider_tree [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1833.362436] env[62684]: INFO nova.network.neutron [req-ad8398d2-fbdb-43fb-b828-e7e5880ad19c req-08799e3a-528c-4f82-94e0-12707a30a934 service nova] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Port 1ce065ea-4c6e-4c34-8b7f-27a3b14d0924 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1833.362877] env[62684]: DEBUG nova.network.neutron [req-ad8398d2-fbdb-43fb-b828-e7e5880ad19c req-08799e3a-528c-4f82-94e0-12707a30a934 service nova] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1833.414499] env[62684]: DEBUG nova.network.neutron [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Successfully created port: f99cf594-91e3-4f98-85ce-def4475f0620 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1833.425082] env[62684]: DEBUG oslo_concurrency.lockutils [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "6d4061e4-a074-445d-95c5-239014ee87f3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.425446] env[62684]: DEBUG oslo_concurrency.lockutils [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "6d4061e4-a074-445d-95c5-239014ee87f3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1833.425745] env[62684]: DEBUG oslo_concurrency.lockutils [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "6d4061e4-a074-445d-95c5-239014ee87f3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.425959] env[62684]: DEBUG oslo_concurrency.lockutils [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "6d4061e4-a074-445d-95c5-239014ee87f3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1833.426268] env[62684]: DEBUG oslo_concurrency.lockutils [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "6d4061e4-a074-445d-95c5-239014ee87f3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.428948] env[62684]: INFO nova.compute.manager [None 
req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Terminating instance [ 1833.433257] env[62684]: DEBUG nova.compute.manager [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1833.433583] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1833.434530] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d7550d2-fac5-477f-8970-a4406a58b4ed {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.443690] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1833.443999] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-47753a50-ddf5-491e-a606-8cb35d26fdc9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.453622] env[62684]: DEBUG oslo_vmware.api [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1833.453622] env[62684]: value = "task-2052595" [ 1833.453622] env[62684]: _type = "Task" [ 1833.453622] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.465069] env[62684]: DEBUG oslo_vmware.api [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052595, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.614399] env[62684]: INFO nova.compute.manager [-] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Took 1.28 seconds to deallocate network for instance. [ 1833.640902] env[62684]: DEBUG nova.compute.manager [req-c7ef7083-7898-47ea-9bc9-a92ebd0ee878 req-917ce356-39ec-4286-8f2b-9d40f7efa565 service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Received event network-changed-8f6b3e69-1998-4808-9c1a-1224c8ab5363 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1833.641473] env[62684]: DEBUG nova.compute.manager [req-c7ef7083-7898-47ea-9bc9-a92ebd0ee878 req-917ce356-39ec-4286-8f2b-9d40f7efa565 service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Refreshing instance network info cache due to event network-changed-8f6b3e69-1998-4808-9c1a-1224c8ab5363. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1833.641735] env[62684]: DEBUG oslo_concurrency.lockutils [req-c7ef7083-7898-47ea-9bc9-a92ebd0ee878 req-917ce356-39ec-4286-8f2b-9d40f7efa565 service nova] Acquiring lock "refresh_cache-6b1f0e69-3915-40dc-b4ec-93ab174f12b6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1833.641904] env[62684]: DEBUG oslo_concurrency.lockutils [req-c7ef7083-7898-47ea-9bc9-a92ebd0ee878 req-917ce356-39ec-4286-8f2b-9d40f7efa565 service nova] Acquired lock "refresh_cache-6b1f0e69-3915-40dc-b4ec-93ab174f12b6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1833.642285] env[62684]: DEBUG nova.network.neutron [req-c7ef7083-7898-47ea-9bc9-a92ebd0ee878 req-917ce356-39ec-4286-8f2b-9d40f7efa565 service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Refreshing network info cache for port 8f6b3e69-1998-4808-9c1a-1224c8ab5363 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1833.661223] env[62684]: DEBUG oslo_vmware.api [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523efdb8-0d5c-26f8-1155-a7f82fb9321c, 'name': SearchDatastore_Task, 'duration_secs': 0.011579} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.662572] env[62684]: DEBUG oslo_concurrency.lockutils [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1833.662887] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] e08f8636-5193-40fa-972c-f0ecab193fc1/e08f8636-5193-40fa-972c-f0ecab193fc1.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1833.663285] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2a7481e7-d99f-4a5e-a5ab-2dbcd55ef576 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.672887] env[62684]: DEBUG oslo_vmware.api [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1833.672887] env[62684]: value = "task-2052596" [ 1833.672887] env[62684]: _type = "Task" [ 1833.672887] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.682255] env[62684]: DEBUG oslo_vmware.api [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052596, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.719435] env[62684]: DEBUG nova.compute.manager [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1833.748016] env[62684]: DEBUG nova.virt.hardware [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1833.748330] env[62684]: DEBUG nova.virt.hardware [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1833.748519] env[62684]: DEBUG nova.virt.hardware [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1833.748715] env[62684]: DEBUG nova.virt.hardware [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1833.748885] env[62684]: DEBUG nova.virt.hardware [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1833.749058] env[62684]: DEBUG nova.virt.hardware [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1833.749280] env[62684]: DEBUG nova.virt.hardware [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1833.749462] env[62684]: DEBUG nova.virt.hardware [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1833.749640] env[62684]: DEBUG nova.virt.hardware [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1833.749811] env[62684]: DEBUG nova.virt.hardware [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1833.750077] env[62684]: DEBUG nova.virt.hardware [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1833.750947] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e09c85e3-e3d0-4861-a63f-2dab593befb0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.760220] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4882b3c4-cdbe-4483-9fda-0a7dd5e2bcb9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.773867] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052594, 'name': CreateVM_Task, 'duration_secs': 0.416706} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.782129] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1833.783190] env[62684]: DEBUG oslo_concurrency.lockutils [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1833.783544] env[62684]: DEBUG oslo_concurrency.lockutils [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1833.783742] env[62684]: DEBUG oslo_concurrency.lockutils [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1833.784039] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0cb11a37-c181-40c8-9cb8-aea06cb82179 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.790083] env[62684]: DEBUG oslo_vmware.api [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Waiting for the task: (returnval){ [ 1833.790083] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5234b977-4469-9f1b-b337-a9f088671e57" [ 1833.790083] env[62684]: _type = "Task" [ 1833.790083] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.799371] env[62684]: DEBUG oslo_vmware.api [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5234b977-4469-9f1b-b337-a9f088671e57, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.858377] env[62684]: DEBUG nova.scheduler.client.report [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1833.865897] env[62684]: DEBUG oslo_concurrency.lockutils [req-ad8398d2-fbdb-43fb-b828-e7e5880ad19c req-08799e3a-528c-4f82-94e0-12707a30a934 service nova] Releasing lock "refresh_cache-3a172e9f-9f79-489e-9571-80bd74ad8609" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1833.964529] env[62684]: DEBUG oslo_vmware.api [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052595, 'name': PowerOffVM_Task, 'duration_secs': 0.263002} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.964846] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1833.965060] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1833.965418] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a53009a9-c0ec-4a4b-a14e-52c6bae565cb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.044433] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1834.044706] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1834.044945] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Deleting the datastore file 
[datastore1] 6d4061e4-a074-445d-95c5-239014ee87f3 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1834.045231] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2eaf0445-8593-4b9f-bc2e-880d76c7aba3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.052387] env[62684]: DEBUG oslo_vmware.api [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1834.052387] env[62684]: value = "task-2052598" [ 1834.052387] env[62684]: _type = "Task" [ 1834.052387] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.061966] env[62684]: DEBUG oslo_vmware.api [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052598, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.123473] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.184884] env[62684]: DEBUG oslo_vmware.api [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052596, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.294358] env[62684]: DEBUG nova.compute.manager [req-46d51f2d-da3f-4e28-9763-de1b82f3f4a7 req-a3859ba6-0523-4321-9edc-aab962b77160 service nova] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Received event network-changed-6f9879b5-4fa4-4afb-a3a5-8ee16a495f70 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1834.294647] env[62684]: DEBUG nova.compute.manager [req-46d51f2d-da3f-4e28-9763-de1b82f3f4a7 req-a3859ba6-0523-4321-9edc-aab962b77160 service nova] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Refreshing instance network info cache due to event network-changed-6f9879b5-4fa4-4afb-a3a5-8ee16a495f70. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1834.294823] env[62684]: DEBUG oslo_concurrency.lockutils [req-46d51f2d-da3f-4e28-9763-de1b82f3f4a7 req-a3859ba6-0523-4321-9edc-aab962b77160 service nova] Acquiring lock "refresh_cache-b788c51b-367b-4eef-93d2-faa8836469b6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1834.294963] env[62684]: DEBUG oslo_concurrency.lockutils [req-46d51f2d-da3f-4e28-9763-de1b82f3f4a7 req-a3859ba6-0523-4321-9edc-aab962b77160 service nova] Acquired lock "refresh_cache-b788c51b-367b-4eef-93d2-faa8836469b6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1834.296310] env[62684]: DEBUG nova.network.neutron [req-46d51f2d-da3f-4e28-9763-de1b82f3f4a7 req-a3859ba6-0523-4321-9edc-aab962b77160 service nova] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Refreshing network info cache for port 6f9879b5-4fa4-4afb-a3a5-8ee16a495f70 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1834.310305] env[62684]: DEBUG oslo_vmware.api [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5234b977-4469-9f1b-b337-a9f088671e57, 'name': SearchDatastore_Task, 'duration_secs': 0.01078} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.311317] env[62684]: DEBUG oslo_concurrency.lockutils [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1834.311403] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1834.311630] env[62684]: DEBUG oslo_concurrency.lockutils [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1834.312080] env[62684]: DEBUG oslo_concurrency.lockutils [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1834.312080] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Creating directory with 
path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1834.312479] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bdfa61cf-8ea1-452e-9777-47aad47ba55a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.322370] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1834.322370] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1834.322813] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9b691f4-05d7-4e1b-b78d-a3052497a4c6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.330358] env[62684]: DEBUG oslo_vmware.api [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Waiting for the task: (returnval){ [ 1834.330358] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b025b5-42f8-245f-60e3-dec5bef42786" [ 1834.330358] env[62684]: _type = "Task" [ 1834.330358] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.341385] env[62684]: DEBUG oslo_vmware.api [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b025b5-42f8-245f-60e3-dec5bef42786, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.348866] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "52839b18-a68a-4ec7-a921-c42454955e82" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.349140] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "52839b18-a68a-4ec7-a921-c42454955e82" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.349360] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "52839b18-a68a-4ec7-a921-c42454955e82-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.349550] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "52839b18-a68a-4ec7-a921-c42454955e82-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.349721] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "52839b18-a68a-4ec7-a921-c42454955e82-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.352666] env[62684]: INFO nova.compute.manager [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Terminating instance [ 1834.354613] env[62684]: DEBUG nova.compute.manager [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1834.354822] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1834.355735] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7462d536-5265-4137-a400-26894ee84d17 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.365341] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.691s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.366034] env[62684]: DEBUG nova.compute.manager [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1834.368522] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1834.368809] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 28.718s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.370559] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b41b0673-04b3-4390-9ebc-bf08165391c7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.379261] env[62684]: DEBUG oslo_vmware.api [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1834.379261] env[62684]: value = "task-2052599" [ 1834.379261] env[62684]: _type = "Task" [ 1834.379261] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.388549] env[62684]: DEBUG oslo_vmware.api [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052599, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.529235] env[62684]: DEBUG nova.network.neutron [req-c7ef7083-7898-47ea-9bc9-a92ebd0ee878 req-917ce356-39ec-4286-8f2b-9d40f7efa565 service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Updated VIF entry in instance network info cache for port 8f6b3e69-1998-4808-9c1a-1224c8ab5363. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1834.529770] env[62684]: DEBUG nova.network.neutron [req-c7ef7083-7898-47ea-9bc9-a92ebd0ee878 req-917ce356-39ec-4286-8f2b-9d40f7efa565 service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Updating instance_info_cache with network_info: [{"id": "8f6b3e69-1998-4808-9c1a-1224c8ab5363", "address": "fa:16:3e:f0:98:53", "network": {"id": "c4f4fd23-e4d0-4e7a-861b-71eeb3008ae5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1003879390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e169ffb4120143dca6d67108986e62f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f6b3e69-19", "ovs_interfaceid": "8f6b3e69-1998-4808-9c1a-1224c8ab5363", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1834.562608] env[62684]: DEBUG oslo_vmware.api [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052598, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.333552} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.562924] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1834.563081] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1834.563266] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1834.563480] env[62684]: INFO nova.compute.manager [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1834.563729] env[62684]: DEBUG oslo.service.loopingcall [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1834.563929] env[62684]: DEBUG nova.compute.manager [-] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1834.564045] env[62684]: DEBUG nova.network.neutron [-] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1834.684968] env[62684]: DEBUG oslo_vmware.api [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052596, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.566959} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.685336] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] e08f8636-5193-40fa-972c-f0ecab193fc1/e08f8636-5193-40fa-972c-f0ecab193fc1.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1834.685711] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1834.686069] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4eb01ef7-313b-43ce-9f35-b89f62eb5848 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.695145] env[62684]: DEBUG oslo_vmware.api [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1834.695145] env[62684]: value = "task-2052600" [ 1834.695145] env[62684]: _type = "Task" [ 1834.695145] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.704180] env[62684]: DEBUG oslo_vmware.api [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052600, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.842609] env[62684]: DEBUG oslo_vmware.api [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b025b5-42f8-245f-60e3-dec5bef42786, 'name': SearchDatastore_Task, 'duration_secs': 0.014789} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.843553] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1d7ad55-d0e7-47ab-9475-5ff12460a8c9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.850278] env[62684]: DEBUG oslo_vmware.api [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Waiting for the task: (returnval){ [ 1834.850278] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529b5b14-f774-bd27-75c1-844681bf0fa8" [ 1834.850278] env[62684]: _type = "Task" [ 1834.850278] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.865393] env[62684]: DEBUG oslo_vmware.api [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529b5b14-f774-bd27-75c1-844681bf0fa8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.873998] env[62684]: DEBUG nova.compute.utils [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1834.885503] env[62684]: DEBUG nova.compute.manager [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Not allocating networking since 'none' was specified. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1834.898731] env[62684]: DEBUG oslo_vmware.api [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052599, 'name': PowerOffVM_Task, 'duration_secs': 0.276422} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.899044] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1834.899243] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1834.899519] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d0c6cc3-66d1-4dce-bce7-03d21560bdea {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.018645] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1835.018908] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1835.019110] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 
tempest-MultipleCreateTestJSON-2097349723-project-member] Deleting the datastore file [datastore2] 52839b18-a68a-4ec7-a921-c42454955e82 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1835.019388] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-735c7c92-9c47-4729-a1c8-31d747aa75f6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.026859] env[62684]: DEBUG oslo_vmware.api [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1835.026859] env[62684]: value = "task-2052602" [ 1835.026859] env[62684]: _type = "Task" [ 1835.026859] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.034887] env[62684]: DEBUG oslo_concurrency.lockutils [req-c7ef7083-7898-47ea-9bc9-a92ebd0ee878 req-917ce356-39ec-4286-8f2b-9d40f7efa565 service nova] Releasing lock "refresh_cache-6b1f0e69-3915-40dc-b4ec-93ab174f12b6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1835.044621] env[62684]: DEBUG oslo_vmware.api [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052602, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.077259] env[62684]: DEBUG nova.network.neutron [req-46d51f2d-da3f-4e28-9763-de1b82f3f4a7 req-a3859ba6-0523-4321-9edc-aab962b77160 service nova] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Updated VIF entry in instance network info cache for port 6f9879b5-4fa4-4afb-a3a5-8ee16a495f70. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1835.077717] env[62684]: DEBUG nova.network.neutron [req-46d51f2d-da3f-4e28-9763-de1b82f3f4a7 req-a3859ba6-0523-4321-9edc-aab962b77160 service nova] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Updating instance_info_cache with network_info: [{"id": "6f9879b5-4fa4-4afb-a3a5-8ee16a495f70", "address": "fa:16:3e:e8:c1:b8", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.233", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f9879b5-4f", "ovs_interfaceid": "6f9879b5-4fa4-4afb-a3a5-8ee16a495f70", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1835.206073] env[62684]: DEBUG oslo_vmware.api [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052600, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105784} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.206073] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1835.206903] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b69cce6e-8622-4a0c-84f1-f6ab9a530f6f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.234511] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] e08f8636-5193-40fa-972c-f0ecab193fc1/e08f8636-5193-40fa-972c-f0ecab193fc1.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1835.235625] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0a3b793b-fe8c-411f-923f-8ce8adb5cb3d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.256971] env[62684]: DEBUG oslo_vmware.api [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1835.256971] env[62684]: value = "task-2052603" [ 1835.256971] env[62684]: _type = "Task" [ 1835.256971] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.264488] env[62684]: DEBUG oslo_vmware.api [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052603, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.271961] env[62684]: DEBUG oslo_vmware.rw_handles [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bb9436-dd71-2af5-9c9a-6539cdecc7e2/disk-0.vmdk. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1835.272745] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5680875-40a5-416b-ac04-2ff44be12efa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.281166] env[62684]: DEBUG oslo_vmware.rw_handles [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bb9436-dd71-2af5-9c9a-6539cdecc7e2/disk-0.vmdk is in state: ready. 
{{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1835.281166] env[62684]: ERROR oslo_vmware.rw_handles [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bb9436-dd71-2af5-9c9a-6539cdecc7e2/disk-0.vmdk due to incomplete transfer. [ 1835.281166] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-cbb925eb-c071-4e5f-8016-a9bed433ab91 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.286363] env[62684]: DEBUG oslo_vmware.rw_handles [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bb9436-dd71-2af5-9c9a-6539cdecc7e2/disk-0.vmdk. {{(pid=62684) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1835.286608] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Uploaded image 01216b0e-c832-4bdf-bcff-de5e6e30665f to the Glance image server {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1835.289789] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Destroying the VM {{(pid=62684) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1835.290073] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-cebdf8f3-c9db-4f71-a6b1-60acf7834b92 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.297884] env[62684]: DEBUG oslo_vmware.api [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1835.297884] env[62684]: value = "task-2052604" [ 1835.297884] env[62684]: _type = "Task" [ 1835.297884] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.306883] env[62684]: DEBUG oslo_vmware.api [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052604, 'name': Destroy_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.366024] env[62684]: DEBUG oslo_concurrency.lockutils [None req-72150c9f-0895-4b84-806f-aa4d16d5d795 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Acquiring lock "3a172e9f-9f79-489e-9571-80bd74ad8609" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.366024] env[62684]: DEBUG oslo_vmware.api [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529b5b14-f774-bd27-75c1-844681bf0fa8, 'name': SearchDatastore_Task, 'duration_secs': 0.031641} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.366024] env[62684]: DEBUG oslo_concurrency.lockutils [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1835.366024] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] b788c51b-367b-4eef-93d2-faa8836469b6/b788c51b-367b-4eef-93d2-faa8836469b6.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1835.366317] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9a4722e6-cc23-40e5-a2ea-164fec1e9088 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.370776] env[62684]: DEBUG oslo_vmware.api [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Waiting for the task: (returnval){ [ 1835.370776] env[62684]: value = "task-2052605" [ 1835.370776] env[62684]: _type = "Task" [ 1835.370776] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.381120] env[62684]: DEBUG oslo_vmware.api [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052605, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.385783] env[62684]: DEBUG nova.compute.manager [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1835.428320] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance effc673a-103f-413b-88ac-6907ad1ee852 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.428491] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance fb7f38a0-bcfa-4d96-bde3-20d6f1d70112 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.428615] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 6b1f0e69-3915-40dc-b4ec-93ab174f12b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.428733] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance d532b5fa-90a3-4f25-8684-4eabaa432c86 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.428844] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 73f27fc0-ebae-41c7-b292-14396f79a5a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.428951] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance dcb0a5b2-379e-44ff-a9b0-be615943c94e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.429074] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.429213] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 06751c34-0724-44ba-a263-ad27fcf2920f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.429327] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 0dbd52ac-c987-4728-974e-73e99465c5e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.429436] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 0676806b-c1f0-4c1a-a12d-add2edf1588f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.429575] env[62684]: WARNING nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 17d30180-9770-4329-a6d8-757a93514a96 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1835.429704] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance ab2c7cbe-6f46-4174-bffb-055a15f2d56b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.429827] env[62684]: WARNING nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance ca22ca59-1b60-46f0-ae83-03ed4002fa0d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1835.429950] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance a9dfeb4d-a92e-41cf-9d2f-43086cc9e868 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.430317] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.430497] env[62684]: WARNING nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 8c046991-b294-4f33-9fce-a241984d66d7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
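[editor's note] The run of resource-tracker records around this point documents the periodic allocation-healing sweep: for each placement allocation held against this compute node, a DEBUG line is emitted when the instance is still actively managed here, and a WARNING line is emitted (and healing is skipped) when it is not. The following is a minimal illustrative sketch of that decision logic only, with hypothetical helper names; it is not the actual nova.compute.resource_tracker implementation.

```python
# Illustrative sketch, not Nova source code.
# `allocations` maps instance UUID -> {'resources': {...}} as reported by placement;
# `tracked_instances` is the set of instance UUIDs this compute host actively manages.
import logging

LOG = logging.getLogger(__name__)


def remove_deleted_instances_allocations(allocations, tracked_instances):
    for instance_uuid, alloc in allocations.items():
        if instance_uuid in tracked_instances:
            # Matches the DEBUG records above: allocation is consistent, nothing to heal.
            LOG.debug(
                "Instance %s actively managed on this compute host and has "
                "allocations in placement: %s.", instance_uuid, alloc)
        else:
            # Matches the WARNING records above: an allocation references this host
            # for an instance it does not manage, so the sweep leaves it untouched.
            LOG.warning(
                "Instance %s is not being actively managed by this compute host "
                "but has allocations referencing this compute host: %s. Skipping "
                "heal of allocation because we do not know what to do.",
                instance_uuid, alloc)
```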
[ 1835.430617] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance f44b2e88-af6d-4252-b562-9d5fa7745b56 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.430740] env[62684]: WARNING nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance dfe40a8c-61d6-4c60-afd3-0defb61c4308 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1835.430877] env[62684]: WARNING nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 3a172e9f-9f79-489e-9571-80bd74ad8609 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1835.430999] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 6d4061e4-a074-445d-95c5-239014ee87f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.431126] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 52839b18-a68a-4ec7-a921-c42454955e82 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.431238] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance e08f8636-5193-40fa-972c-f0ecab193fc1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.431351] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance b788c51b-367b-4eef-93d2-faa8836469b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.431471] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance d06f3099-d05f-417f-a71a-7b368590624f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.431608] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance e2a9ab56-bde3-40b6-a214-19c77a9c6778 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.457832] env[62684]: DEBUG nova.network.neutron [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Successfully updated port: f99cf594-91e3-4f98-85ce-def4475f0620 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1835.524419] env[62684]: DEBUG nova.network.neutron [-] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1835.542020] env[62684]: DEBUG oslo_vmware.api [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052602, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.211411} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.542283] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1835.542867] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1835.543106] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1835.543338] env[62684]: INFO nova.compute.manager [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1835.543594] env[62684]: DEBUG oslo.service.loopingcall [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1835.543842] env[62684]: DEBUG nova.compute.manager [-] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1835.543975] env[62684]: DEBUG nova.network.neutron [-] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1835.580535] env[62684]: DEBUG oslo_concurrency.lockutils [req-46d51f2d-da3f-4e28-9763-de1b82f3f4a7 req-a3859ba6-0523-4321-9edc-aab962b77160 service nova] Releasing lock "refresh_cache-b788c51b-367b-4eef-93d2-faa8836469b6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1835.580863] env[62684]: DEBUG nova.compute.manager [req-46d51f2d-da3f-4e28-9763-de1b82f3f4a7 req-a3859ba6-0523-4321-9edc-aab962b77160 service nova] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Received event network-vif-deleted-1ce065ea-4c6e-4c34-8b7f-27a3b14d0924 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1835.746769] env[62684]: DEBUG nova.compute.manager [req-52b0bd6d-149c-46dc-a88e-a06715901c7d req-6bbabc72-5114-43f5-828d-f6e8bc2dfa17 service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Received event network-changed-8f6b3e69-1998-4808-9c1a-1224c8ab5363 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1835.746988] env[62684]: DEBUG nova.compute.manager [req-52b0bd6d-149c-46dc-a88e-a06715901c7d req-6bbabc72-5114-43f5-828d-f6e8bc2dfa17 service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Refreshing instance network info cache due to event network-changed-8f6b3e69-1998-4808-9c1a-1224c8ab5363. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1835.747235] env[62684]: DEBUG oslo_concurrency.lockutils [req-52b0bd6d-149c-46dc-a88e-a06715901c7d req-6bbabc72-5114-43f5-828d-f6e8bc2dfa17 service nova] Acquiring lock "refresh_cache-6b1f0e69-3915-40dc-b4ec-93ab174f12b6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1835.747385] env[62684]: DEBUG oslo_concurrency.lockutils [req-52b0bd6d-149c-46dc-a88e-a06715901c7d req-6bbabc72-5114-43f5-828d-f6e8bc2dfa17 service nova] Acquired lock "refresh_cache-6b1f0e69-3915-40dc-b4ec-93ab174f12b6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1835.747548] env[62684]: DEBUG nova.network.neutron [req-52b0bd6d-149c-46dc-a88e-a06715901c7d req-6bbabc72-5114-43f5-828d-f6e8bc2dfa17 service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Refreshing network info cache for port 8f6b3e69-1998-4808-9c1a-1224c8ab5363 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1835.773546] env[62684]: DEBUG oslo_vmware.api [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052603, 'name': ReconfigVM_Task, 'duration_secs': 0.331436} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.773546] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Reconfigured VM instance instance-00000020 to attach disk [datastore1] e08f8636-5193-40fa-972c-f0ecab193fc1/e08f8636-5193-40fa-972c-f0ecab193fc1.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1835.773546] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4bb8cf93-3a0f-4bd6-8947-cc8712cbee85 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.779807] env[62684]: DEBUG oslo_vmware.api [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1835.779807] env[62684]: value = "task-2052606" [ 1835.779807] env[62684]: _type = "Task" [ 1835.779807] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.790585] env[62684]: DEBUG oslo_vmware.api [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052606, 'name': Rename_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.808446] env[62684]: DEBUG oslo_vmware.api [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052604, 'name': Destroy_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.883288] env[62684]: DEBUG oslo_vmware.api [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052605, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.935487] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 4a15d298-115f-4132-8be0-00e623fa21d8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1835.960775] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Acquiring lock "refresh_cache-d06f3099-d05f-417f-a71a-7b368590624f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1835.960925] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Acquired lock "refresh_cache-d06f3099-d05f-417f-a71a-7b368590624f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1835.961126] env[62684]: DEBUG nova.network.neutron [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1836.027224] env[62684]: INFO nova.compute.manager [-] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Took 1.46 seconds to deallocate network for instance. [ 1836.294029] env[62684]: DEBUG oslo_vmware.api [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052606, 'name': Rename_Task, 'duration_secs': 0.225355} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.294235] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1836.294484] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0985d64f-d2a6-48dd-8f92-6f492dca02ab {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.303798] env[62684]: DEBUG oslo_vmware.api [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1836.303798] env[62684]: value = "task-2052607" [ 1836.303798] env[62684]: _type = "Task" [ 1836.303798] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.309677] env[62684]: DEBUG oslo_vmware.api [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052604, 'name': Destroy_Task, 'duration_secs': 0.547937} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.309796] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Destroyed the VM [ 1836.310042] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Deleting Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1836.310286] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-06dfbd46-3b30-4f4d-83ac-e4ee8131862d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.314650] env[62684]: DEBUG oslo_vmware.api [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052607, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.319913] env[62684]: DEBUG oslo_vmware.api [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1836.319913] env[62684]: value = "task-2052608" [ 1836.319913] env[62684]: _type = "Task" [ 1836.319913] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.327239] env[62684]: DEBUG oslo_vmware.api [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052608, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.385206] env[62684]: DEBUG oslo_vmware.api [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052605, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539821} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.385495] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] b788c51b-367b-4eef-93d2-faa8836469b6/b788c51b-367b-4eef-93d2-faa8836469b6.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1836.385718] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1836.385973] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-deb654a3-6b88-4be1-8751-b01d61b0c922 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.392303] env[62684]: DEBUG oslo_vmware.api [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Waiting for the task: (returnval){ [ 1836.392303] env[62684]: value = "task-2052609" [ 1836.392303] env[62684]: _type = "Task" [ 1836.392303] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.400126] env[62684]: DEBUG oslo_vmware.api [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052609, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.407453] env[62684]: DEBUG nova.compute.manager [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1836.436045] env[62684]: DEBUG nova.virt.hardware [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1836.436371] env[62684]: DEBUG nova.virt.hardware [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1836.436574] env[62684]: DEBUG nova.virt.hardware [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1836.436771] env[62684]: DEBUG nova.virt.hardware [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1836.436920] env[62684]: DEBUG nova.virt.hardware [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1836.437107] env[62684]: DEBUG nova.virt.hardware [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1836.437344] env[62684]: DEBUG nova.virt.hardware [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1836.437516] env[62684]: DEBUG nova.virt.hardware [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1836.437684] env[62684]: DEBUG nova.virt.hardware [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 
tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1836.437880] env[62684]: DEBUG nova.virt.hardware [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1836.438123] env[62684]: DEBUG nova.virt.hardware [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1836.438998] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d866bf-4b9f-4e25-aca0-6ce5ef7a5f92 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.442144] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance b4cd871a-30ea-4b7a-98ad-00b8676dc2cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1836.443400] env[62684]: DEBUG nova.network.neutron [-] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1836.456403] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-635ad297-af5b-488d-ab5e-0aff4b43b81b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.475798] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Instance VIF info [] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1836.484359] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Creating folder: Project (34b8116ae07d4c8494c4ee49c63df975). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1836.487394] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4b55f786-57a3-417c-a258-f06255821f6e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.499105] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Created folder: Project (34b8116ae07d4c8494c4ee49c63df975) in parent group-v421118. 
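The recurring "Invoking <method> with opID=oslo.vmware-…", "Waiting for the task", "progress is N%" and "completed successfully" lines all come from the same oslo.vmware pattern: a vSphere method call returns a task managed-object reference, and the session polls it until it finishes. A minimal sketch of that pattern follows; the vCenter host, credentials and the power_on helper are placeholders, not values from this deployment.

    # Minimal sketch of the invoke/poll pattern seen throughout this log.
    from oslo_vmware import api

    # Placeholder endpoint and credentials; constructing the session logs in to
    # vCenter, which is what produced the "Logging into host" lines earlier.
    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    def power_on(vm_ref):
        # Corresponds to "Invoking VirtualMachine.PowerOnVM_Task with opID=...".
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # Corresponds to the repeated "Task: {...} progress is N%" polling lines;
        # wait_for_task() returns on success or raises if the task fails.
        session.wait_for_task(task)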
[ 1836.499269] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Creating folder: Instances. Parent ref: group-v421217. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1836.499467] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-51d4331e-58de-40a2-bcde-5de979747348 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.510514] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Created folder: Instances in parent group-v421217. [ 1836.510769] env[62684]: DEBUG oslo.service.loopingcall [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1836.510982] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1836.511232] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-684e05db-027f-41c9-8067-760cd577434c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.523616] env[62684]: DEBUG nova.network.neutron [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1836.529764] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1836.529764] env[62684]: value = "task-2052612" [ 1836.529764] env[62684]: _type = "Task" [ 1836.529764] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.533867] env[62684]: DEBUG oslo_concurrency.lockutils [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.535754] env[62684]: DEBUG nova.network.neutron [req-52b0bd6d-149c-46dc-a88e-a06715901c7d req-6bbabc72-5114-43f5-828d-f6e8bc2dfa17 service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Updated VIF entry in instance network info cache for port 8f6b3e69-1998-4808-9c1a-1224c8ab5363. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1836.536100] env[62684]: DEBUG nova.network.neutron [req-52b0bd6d-149c-46dc-a88e-a06715901c7d req-6bbabc72-5114-43f5-828d-f6e8bc2dfa17 service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Updating instance_info_cache with network_info: [{"id": "8f6b3e69-1998-4808-9c1a-1224c8ab5363", "address": "fa:16:3e:f0:98:53", "network": {"id": "c4f4fd23-e4d0-4e7a-861b-71eeb3008ae5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1003879390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e169ffb4120143dca6d67108986e62f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f6b3e69-19", "ovs_interfaceid": "8f6b3e69-1998-4808-9c1a-1224c8ab5363", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1836.539941] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052612, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.665252] env[62684]: DEBUG nova.network.neutron [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Updating instance_info_cache with network_info: [{"id": "f99cf594-91e3-4f98-85ce-def4475f0620", "address": "fa:16:3e:7e:2e:12", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.199", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf99cf594-91", "ovs_interfaceid": "f99cf594-91e3-4f98-85ce-def4475f0620", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1836.814038] env[62684]: DEBUG oslo_vmware.api [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 
tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052607, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.830661] env[62684]: DEBUG oslo_vmware.api [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052608, 'name': RemoveSnapshot_Task} progress is 12%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.905027] env[62684]: DEBUG oslo_vmware.api [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052609, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067847} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.905430] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1836.906350] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f0612c4-ab81-4b2f-ba8f-cb7306915c73 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.929818] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] b788c51b-367b-4eef-93d2-faa8836469b6/b788c51b-367b-4eef-93d2-faa8836469b6.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1836.930234] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d8213f5-91d8-4d7d-860d-82d0516acf5d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.949734] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance cfe219da-adf9-44b9-9df3-752ccf72a68b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1836.951526] env[62684]: INFO nova.compute.manager [-] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Took 1.41 seconds to deallocate network for instance. [ 1836.955047] env[62684]: DEBUG oslo_vmware.api [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Waiting for the task: (returnval){ [ 1836.955047] env[62684]: value = "task-2052613" [ 1836.955047] env[62684]: _type = "Task" [ 1836.955047] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.968616] env[62684]: DEBUG oslo_vmware.api [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052613, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.043759] env[62684]: DEBUG oslo_concurrency.lockutils [req-52b0bd6d-149c-46dc-a88e-a06715901c7d req-6bbabc72-5114-43f5-828d-f6e8bc2dfa17 service nova] Releasing lock "refresh_cache-6b1f0e69-3915-40dc-b4ec-93ab174f12b6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1837.044178] env[62684]: DEBUG nova.compute.manager [req-52b0bd6d-149c-46dc-a88e-a06715901c7d req-6bbabc72-5114-43f5-828d-f6e8bc2dfa17 service nova] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Received event network-vif-deleted-75bc051d-717c-4b1e-9b3c-e79874e6d941 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1837.044359] env[62684]: DEBUG nova.compute.manager [req-52b0bd6d-149c-46dc-a88e-a06715901c7d req-6bbabc72-5114-43f5-828d-f6e8bc2dfa17 service nova] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Received event network-vif-plugged-f99cf594-91e3-4f98-85ce-def4475f0620 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1837.044604] env[62684]: DEBUG oslo_concurrency.lockutils [req-52b0bd6d-149c-46dc-a88e-a06715901c7d req-6bbabc72-5114-43f5-828d-f6e8bc2dfa17 service nova] Acquiring lock "d06f3099-d05f-417f-a71a-7b368590624f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1837.044851] env[62684]: DEBUG oslo_concurrency.lockutils [req-52b0bd6d-149c-46dc-a88e-a06715901c7d req-6bbabc72-5114-43f5-828d-f6e8bc2dfa17 service nova] Lock "d06f3099-d05f-417f-a71a-7b368590624f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1837.045159] env[62684]: DEBUG oslo_concurrency.lockutils [req-52b0bd6d-149c-46dc-a88e-a06715901c7d req-6bbabc72-5114-43f5-828d-f6e8bc2dfa17 service nova] Lock "d06f3099-d05f-417f-a71a-7b368590624f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.045331] env[62684]: DEBUG nova.compute.manager [req-52b0bd6d-149c-46dc-a88e-a06715901c7d req-6bbabc72-5114-43f5-828d-f6e8bc2dfa17 service nova] [instance: d06f3099-d05f-417f-a71a-7b368590624f] No waiting events found dispatching network-vif-plugged-f99cf594-91e3-4f98-85ce-def4475f0620 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1837.045559] env[62684]: WARNING nova.compute.manager [req-52b0bd6d-149c-46dc-a88e-a06715901c7d req-6bbabc72-5114-43f5-828d-f6e8bc2dfa17 service nova] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Received unexpected event network-vif-plugged-f99cf594-91e3-4f98-85ce-def4475f0620 for instance with vm_state building and task_state spawning. 
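The "Acquiring lock", "Acquired lock" and "Releasing lock" lines around the refresh_cache-<uuid> and <uuid>-events names come from oslo_concurrency, which serialises concurrent work on one instance's network info cache or event queue. The snippet below is a small illustration of that usage, not Nova's code; refresh_from_neutron is a hypothetical callable standing in for the actual cache refresh.

    # Illustrative use of the per-instance lock naming seen in the log.
    from oslo_concurrency import lockutils

    def refresh_network_cache(instance_uuid, refresh_from_neutron):
        # One internal lock per instance cache, same convention as
        # "refresh_cache-<instance uuid>" in the lines above.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            return refresh_from_neutron(instance_uuid)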
[ 1837.045738] env[62684]: DEBUG nova.compute.manager [req-52b0bd6d-149c-46dc-a88e-a06715901c7d req-6bbabc72-5114-43f5-828d-f6e8bc2dfa17 service nova] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Received event network-changed-f99cf594-91e3-4f98-85ce-def4475f0620 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1837.045902] env[62684]: DEBUG nova.compute.manager [req-52b0bd6d-149c-46dc-a88e-a06715901c7d req-6bbabc72-5114-43f5-828d-f6e8bc2dfa17 service nova] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Refreshing instance network info cache due to event network-changed-f99cf594-91e3-4f98-85ce-def4475f0620. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1837.046082] env[62684]: DEBUG oslo_concurrency.lockutils [req-52b0bd6d-149c-46dc-a88e-a06715901c7d req-6bbabc72-5114-43f5-828d-f6e8bc2dfa17 service nova] Acquiring lock "refresh_cache-d06f3099-d05f-417f-a71a-7b368590624f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1837.046269] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052612, 'name': CreateVM_Task, 'duration_secs': 0.310322} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.046412] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1837.046775] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1837.046966] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1837.047303] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1837.047562] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04d19dd5-a83c-4f41-9dc0-b6a4fc1d106a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.052446] env[62684]: DEBUG oslo_vmware.api [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Waiting for the task: (returnval){ [ 1837.052446] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5247352d-5874-fc00-c318-b33e6827aa1e" [ 1837.052446] env[62684]: _type = "Task" [ 1837.052446] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.061478] env[62684]: DEBUG oslo_vmware.api [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5247352d-5874-fc00-c318-b33e6827aa1e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.167911] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Releasing lock "refresh_cache-d06f3099-d05f-417f-a71a-7b368590624f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1837.168295] env[62684]: DEBUG nova.compute.manager [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Instance network_info: |[{"id": "f99cf594-91e3-4f98-85ce-def4475f0620", "address": "fa:16:3e:7e:2e:12", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.199", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf99cf594-91", "ovs_interfaceid": "f99cf594-91e3-4f98-85ce-def4475f0620", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1837.168629] env[62684]: DEBUG oslo_concurrency.lockutils [req-52b0bd6d-149c-46dc-a88e-a06715901c7d req-6bbabc72-5114-43f5-828d-f6e8bc2dfa17 service nova] Acquired lock "refresh_cache-d06f3099-d05f-417f-a71a-7b368590624f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1837.168808] env[62684]: DEBUG nova.network.neutron [req-52b0bd6d-149c-46dc-a88e-a06715901c7d req-6bbabc72-5114-43f5-828d-f6e8bc2dfa17 service nova] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Refreshing network info cache for port f99cf594-91e3-4f98-85ce-def4475f0620 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1837.170026] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:2e:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ded18042-834c-4792-b3e8-b1c377446432', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f99cf594-91e3-4f98-85ce-def4475f0620', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1837.177396] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Creating folder: Project (001dd1d6b30145b69e13ef73a58be713). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1837.178508] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a231427a-e90b-46bc-bf40-12149f65469e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.190203] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Created folder: Project (001dd1d6b30145b69e13ef73a58be713) in parent group-v421118. [ 1837.190421] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Creating folder: Instances. Parent ref: group-v421220. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1837.190677] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-79dddbb6-eeb1-49d8-959f-d9d20b18dd88 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.201140] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Created folder: Instances in parent group-v421220. [ 1837.201386] env[62684]: DEBUG oslo.service.loopingcall [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1837.201585] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1837.201801] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d742b96d-d2ee-40a8-a2b8-db244b986c93 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.224446] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1837.224446] env[62684]: value = "task-2052616" [ 1837.224446] env[62684]: _type = "Task" [ 1837.224446] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.232136] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052616, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.317820] env[62684]: DEBUG oslo_vmware.api [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052607, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.331028] env[62684]: DEBUG oslo_vmware.api [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052608, 'name': RemoveSnapshot_Task, 'duration_secs': 0.663044} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.331497] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Deleted Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1837.331873] env[62684]: INFO nova.compute.manager [None req-9c72373c-9de1-4f43-a342-901d5d578d94 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Took 15.82 seconds to snapshot the instance on the hypervisor. [ 1837.457038] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance c6dc5401-f59e-4c18-9553-1240e2f49bce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1837.463987] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1837.467618] env[62684]: DEBUG oslo_vmware.api [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052613, 'name': ReconfigVM_Task, 'duration_secs': 0.481565} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.467892] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Reconfigured VM instance instance-00000021 to attach disk [datastore1] b788c51b-367b-4eef-93d2-faa8836469b6/b788c51b-367b-4eef-93d2-faa8836469b6.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1837.468612] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e79338ad-b8c3-464e-bf19-14c831daf579 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.476705] env[62684]: DEBUG oslo_vmware.api [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Waiting for the task: (returnval){ [ 1837.476705] env[62684]: value = "task-2052617" [ 1837.476705] env[62684]: _type = "Task" [ 1837.476705] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.484245] env[62684]: DEBUG oslo_vmware.api [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052617, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.566766] env[62684]: DEBUG oslo_vmware.api [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5247352d-5874-fc00-c318-b33e6827aa1e, 'name': SearchDatastore_Task, 'duration_secs': 0.016685} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.567275] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1837.567545] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1837.567829] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1837.568052] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1837.568289] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1837.568870] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d69c94df-a011-46ff-898a-bb82a11c78a0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.578656] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1837.578941] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1837.580071] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89d1fa6c-95fb-4591-910c-2f8220bc26a0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.586051] env[62684]: DEBUG oslo_vmware.api [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Waiting for the task: (returnval){ [ 1837.586051] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]528d753e-e721-b2d1-1fe0-11ba945f30ec" [ 1837.586051] env[62684]: _type = "Task" [ 1837.586051] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.594762] env[62684]: DEBUG oslo_vmware.api [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]528d753e-e721-b2d1-1fe0-11ba945f30ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.735103] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052616, 'name': CreateVM_Task, 'duration_secs': 0.461993} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.735355] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1837.736013] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1837.736189] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1837.736533] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1837.736788] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40dda8fc-b2a7-4e88-9b9d-b977dda423e5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.741033] env[62684]: DEBUG oslo_vmware.api [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Waiting for the task: (returnval){ [ 
1837.741033] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d56ff3-d44e-6dee-392f-77745d3bbdca" [ 1837.741033] env[62684]: _type = "Task" [ 1837.741033] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.750145] env[62684]: DEBUG oslo_vmware.api [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d56ff3-d44e-6dee-392f-77745d3bbdca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.772063] env[62684]: DEBUG nova.compute.manager [req-73ffa645-b31a-4dfe-8fe3-dd621fabec33 req-7a45e471-8c0c-4053-bab5-d0a0d2fa4a5c service nova] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Received event network-vif-deleted-e6a11cfd-6369-4dd8-a41b-dd7ef4d49a87 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1837.819060] env[62684]: DEBUG oslo_vmware.api [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052607, 'name': PowerOnVM_Task, 'duration_secs': 1.261576} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.819345] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1837.819547] env[62684]: INFO nova.compute.manager [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Took 9.79 seconds to spawn the instance on the hypervisor. [ 1837.819727] env[62684]: DEBUG nova.compute.manager [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1837.820542] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e010c499-3a66-4b66-abaf-3a83b7416273 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.881583] env[62684]: DEBUG nova.network.neutron [req-52b0bd6d-149c-46dc-a88e-a06715901c7d req-6bbabc72-5114-43f5-828d-f6e8bc2dfa17 service nova] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Updated VIF entry in instance network info cache for port f99cf594-91e3-4f98-85ce-def4475f0620. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1837.881984] env[62684]: DEBUG nova.network.neutron [req-52b0bd6d-149c-46dc-a88e-a06715901c7d req-6bbabc72-5114-43f5-828d-f6e8bc2dfa17 service nova] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Updating instance_info_cache with network_info: [{"id": "f99cf594-91e3-4f98-85ce-def4475f0620", "address": "fa:16:3e:7e:2e:12", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.199", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf99cf594-91", "ovs_interfaceid": "f99cf594-91e3-4f98-85ce-def4475f0620", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1837.963881] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1837.986517] env[62684]: DEBUG oslo_vmware.api [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052617, 'name': Rename_Task, 'duration_secs': 0.160275} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.986840] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1837.987111] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0ec6bcea-78e0-4c0a-b668-71c422a30a90 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.992659] env[62684]: DEBUG oslo_vmware.api [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Waiting for the task: (returnval){ [ 1837.992659] env[62684]: value = "task-2052618" [ 1837.992659] env[62684]: _type = "Task" [ 1837.992659] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.000525] env[62684]: DEBUG oslo_vmware.api [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052618, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.097012] env[62684]: DEBUG oslo_vmware.api [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]528d753e-e721-b2d1-1fe0-11ba945f30ec, 'name': SearchDatastore_Task, 'duration_secs': 0.015857} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.097891] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aedae15d-1860-4338-90d2-6df933c02430 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.102916] env[62684]: DEBUG oslo_vmware.api [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Waiting for the task: (returnval){ [ 1838.102916] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d2f120-59e1-865b-6f96-923195c34aeb" [ 1838.102916] env[62684]: _type = "Task" [ 1838.102916] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.110721] env[62684]: DEBUG oslo_vmware.api [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d2f120-59e1-865b-6f96-923195c34aeb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.251097] env[62684]: DEBUG oslo_vmware.api [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d56ff3-d44e-6dee-392f-77745d3bbdca, 'name': SearchDatastore_Task, 'duration_secs': 0.009185} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.251373] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1838.251606] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1838.251847] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1838.251988] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1838.252183] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1838.252443] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb4097db-f516-4e35-8995-3dc8e32310cf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.260283] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1838.260486] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1838.261180] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7187e84-c48c-4c83-aa75-386040b7b074 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.266036] env[62684]: DEBUG oslo_vmware.api [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Waiting for the task: (returnval){ [ 1838.266036] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]525c805e-7ebb-2682-a670-79a813a5e339" [ 1838.266036] env[62684]: _type = "Task" [ 1838.266036] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.273222] env[62684]: DEBUG oslo_vmware.api [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]525c805e-7ebb-2682-a670-79a813a5e339, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.341433] env[62684]: INFO nova.compute.manager [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Took 42.38 seconds to build instance. [ 1838.384557] env[62684]: DEBUG oslo_concurrency.lockutils [req-52b0bd6d-149c-46dc-a88e-a06715901c7d req-6bbabc72-5114-43f5-828d-f6e8bc2dfa17 service nova] Releasing lock "refresh_cache-d06f3099-d05f-417f-a71a-7b368590624f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1838.467213] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 025dfe36-1f14-4bda-84a0-d424364b745b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1838.503197] env[62684]: DEBUG oslo_vmware.api [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052618, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.613908] env[62684]: DEBUG oslo_vmware.api [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d2f120-59e1-865b-6f96-923195c34aeb, 'name': SearchDatastore_Task, 'duration_secs': 0.009695} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.614273] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1838.614611] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] e2a9ab56-bde3-40b6-a214-19c77a9c6778/e2a9ab56-bde3-40b6-a214-19c77a9c6778.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1838.614925] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-21a355c6-5b36-4a03-9c84-d73583357aa7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.623945] env[62684]: DEBUG oslo_vmware.api [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Waiting for the task: (returnval){ [ 1838.623945] env[62684]: value = "task-2052619" [ 1838.623945] env[62684]: _type = "Task" [ 1838.623945] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.631596] env[62684]: DEBUG oslo_vmware.api [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052619, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.778083] env[62684]: DEBUG oslo_vmware.api [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]525c805e-7ebb-2682-a670-79a813a5e339, 'name': SearchDatastore_Task, 'duration_secs': 0.007964} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.778938] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e98522a-7a48-42ac-8da2-3ae03d632cbd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.786462] env[62684]: DEBUG oslo_vmware.api [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Waiting for the task: (returnval){ [ 1838.786462] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5256b9b7-235d-db0b-54af-dd9feaec1f6d" [ 1838.786462] env[62684]: _type = "Task" [ 1838.786462] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.795489] env[62684]: DEBUG oslo_vmware.api [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5256b9b7-235d-db0b-54af-dd9feaec1f6d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.843661] env[62684]: DEBUG oslo_concurrency.lockutils [None req-65a1e230-d82e-4039-8831-966f9892f2b7 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "e08f8636-5193-40fa-972c-f0ecab193fc1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.760s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1838.971289] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance b945f05d-ef1c-4469-9390-f7bbd4f435f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1839.006851] env[62684]: DEBUG oslo_vmware.api [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052618, 'name': PowerOnVM_Task, 'duration_secs': 0.511428} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.007269] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1839.007522] env[62684]: INFO nova.compute.manager [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Took 8.69 seconds to spawn the instance on the hypervisor. [ 1839.007748] env[62684]: DEBUG nova.compute.manager [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1839.008753] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f8ff4b-db5d-4c90-ac60-8b8ca275565f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.134918] env[62684]: DEBUG oslo_vmware.api [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052619, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.433106} completed successfully. 
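Annotation: each "Invoking <SOAP method> ... Waiting for the task ... progress is N% ... completed successfully" block above is one oslo.vmware request/poll cycle: a vSphere task method is invoked through the shared VMwareAPISession, then the caller blocks in wait_for_task() while _poll_task logs the progress percentages. A rough sketch of that cycle using public oslo.vmware calls; the endpoint, credentials, datastore paths and CopyVirtualDisk_Task arguments are placeholders for illustration, not Nova's exact call:

    from oslo_vmware import api

    # Placeholder vCenter endpoint and credentials.
    session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # The virtual disk manager is a managed object hanging off the service content.
    disk_mgr = session.vim.service_content.virtualDiskManager

    # Start the server-side task (analogous to the CopyVirtualDisk_Task entries above).
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName='[datastore1] devstack-image-cache_base/base.vmdk',
                              destName='[datastore1] some-instance/some-instance.vmdk')

    # wait_for_task() polls the task until it finishes and raises if it errors out.
    session.wait_for_task(task)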
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.135210] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] e2a9ab56-bde3-40b6-a214-19c77a9c6778/e2a9ab56-bde3-40b6-a214-19c77a9c6778.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1839.135461] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1839.135724] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7d537a76-d97b-4202-87ba-ee12804df4b8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.142659] env[62684]: DEBUG oslo_vmware.api [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Waiting for the task: (returnval){ [ 1839.142659] env[62684]: value = "task-2052620" [ 1839.142659] env[62684]: _type = "Task" [ 1839.142659] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.152164] env[62684]: DEBUG oslo_vmware.api [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052620, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.298652] env[62684]: DEBUG oslo_vmware.api [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5256b9b7-235d-db0b-54af-dd9feaec1f6d, 'name': SearchDatastore_Task, 'duration_secs': 0.020398} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.298652] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1839.298652] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] d06f3099-d05f-417f-a71a-7b368590624f/d06f3099-d05f-417f-a71a-7b368590624f.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1839.298928] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e2cb91c3-4f11-4f4f-a9e2-c0725cfca93a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.305578] env[62684]: DEBUG oslo_vmware.api [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Waiting for the task: (returnval){ [ 1839.305578] env[62684]: value = "task-2052621" [ 1839.305578] env[62684]: _type = "Task" [ 1839.305578] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.313372] env[62684]: DEBUG oslo_vmware.api [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Task: {'id': task-2052621, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.345199] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42ea0f39-ff65-4e57-93fe-d50bf147bbc2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.347971] env[62684]: DEBUG nova.compute.manager [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1839.354128] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ed02c82a-f864-4ad5-8b38-40098a291872 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Suspending the VM {{(pid=62684) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1839.354367] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-56aa4fcc-2ff6-41d1-9089-5c4e0479697f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.360493] env[62684]: DEBUG oslo_vmware.api [None req-ed02c82a-f864-4ad5-8b38-40098a291872 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1839.360493] env[62684]: value = "task-2052622" [ 1839.360493] env[62684]: _type = "Task" [ 1839.360493] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.368226] env[62684]: DEBUG oslo_vmware.api [None req-ed02c82a-f864-4ad5-8b38-40098a291872 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052622, 'name': SuspendVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.474669] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 50bc9674-d19c-40f1-a89f-1738a1e48307 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1839.527716] env[62684]: INFO nova.compute.manager [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Took 42.65 seconds to build instance. [ 1839.568055] env[62684]: DEBUG nova.compute.manager [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1839.568343] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baaa4d02-9d0a-4f84-a53b-f3ac49aa8a7d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.654320] env[62684]: DEBUG oslo_vmware.api [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052620, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066578} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.654652] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1839.655579] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ece04c-83d4-4073-b87b-ec4e4c99703f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.677976] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Reconfiguring VM instance instance-00000023 to attach disk [datastore2] e2a9ab56-bde3-40b6-a214-19c77a9c6778/e2a9ab56-bde3-40b6-a214-19c77a9c6778.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1839.678364] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3fb2537-6c3a-4676-b6f1-db27201cb2f5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.699357] env[62684]: DEBUG oslo_vmware.api [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Waiting for the task: (returnval){ [ 1839.699357] env[62684]: value = "task-2052623" [ 1839.699357] env[62684]: _type = "Task" [ 1839.699357] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.708646] env[62684]: DEBUG oslo_vmware.api [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052623, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.816210] env[62684]: DEBUG oslo_vmware.api [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Task: {'id': task-2052621, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.870825] env[62684]: DEBUG oslo_vmware.api [None req-ed02c82a-f864-4ad5-8b38-40098a291872 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052622, 'name': SuspendVM_Task} progress is 58%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.872568] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1839.978548] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1840.029555] env[62684]: DEBUG oslo_concurrency.lockutils [None req-afe62dcc-6d43-4b33-88ea-4a4d7443404d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Lock "b788c51b-367b-4eef-93d2-faa8836469b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.442s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1840.079559] env[62684]: INFO nova.compute.manager [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] instance snapshotting [ 1840.082229] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af03a8c9-e955-4bee-b97b-e0d2322d7e96 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.100686] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4af9a62-10f6-43d3-8833-a74e72f8f985 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.210135] env[62684]: DEBUG oslo_vmware.api [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052623, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.317025] env[62684]: DEBUG oslo_vmware.api [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Task: {'id': task-2052621, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.554344} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.317328] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] d06f3099-d05f-417f-a71a-7b368590624f/d06f3099-d05f-417f-a71a-7b368590624f.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1840.318619] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1840.318619] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-461773e3-e3e5-4099-a2d1-747108f81ef2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.327199] env[62684]: DEBUG oslo_vmware.api [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Waiting for the task: (returnval){ [ 1840.327199] env[62684]: value = "task-2052624" [ 1840.327199] env[62684]: _type = "Task" [ 1840.327199] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.336448] env[62684]: DEBUG oslo_vmware.api [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Task: {'id': task-2052624, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.370697] env[62684]: DEBUG oslo_vmware.api [None req-ed02c82a-f864-4ad5-8b38-40098a291872 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052622, 'name': SuspendVM_Task} progress is 58%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.481491] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance b1f70e39-bf37-4fb8-b95b-653b59bec265 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1840.532815] env[62684]: DEBUG nova.compute.manager [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1840.610917] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Creating Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1840.611400] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-4b83c423-403f-46b2-99e7-a6cf473f2faa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.619164] env[62684]: DEBUG oslo_vmware.api [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1840.619164] env[62684]: value = "task-2052625" [ 1840.619164] env[62684]: _type = "Task" [ 1840.619164] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.627813] env[62684]: DEBUG oslo_vmware.api [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052625, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.711695] env[62684]: DEBUG oslo_vmware.api [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052623, 'name': ReconfigVM_Task, 'duration_secs': 0.839084} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.712059] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Reconfigured VM instance instance-00000023 to attach disk [datastore2] e2a9ab56-bde3-40b6-a214-19c77a9c6778/e2a9ab56-bde3-40b6-a214-19c77a9c6778.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1840.712713] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b76ae909-0301-4f5f-84de-15695a80aca3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.720034] env[62684]: DEBUG oslo_vmware.api [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Waiting for the task: (returnval){ [ 1840.720034] env[62684]: value = "task-2052626" [ 1840.720034] env[62684]: _type = "Task" [ 1840.720034] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.728861] env[62684]: DEBUG oslo_vmware.api [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052626, 'name': Rename_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.837210] env[62684]: DEBUG oslo_vmware.api [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Task: {'id': task-2052624, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.204586} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.837488] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1840.838264] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5487109-a9d9-4ffa-97fa-91684e7cfbbb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.860549] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] d06f3099-d05f-417f-a71a-7b368590624f/d06f3099-d05f-417f-a71a-7b368590624f.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1840.860839] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-45cb2d67-f057-44f7-bc38-0aec6519d7a2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.884459] env[62684]: DEBUG oslo_vmware.api [None req-ed02c82a-f864-4ad5-8b38-40098a291872 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052622, 'name': SuspendVM_Task, 'duration_secs': 1.122168} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.885694] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ed02c82a-f864-4ad5-8b38-40098a291872 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Suspended the VM {{(pid=62684) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1840.885885] env[62684]: DEBUG nova.compute.manager [None req-ed02c82a-f864-4ad5-8b38-40098a291872 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1840.886223] env[62684]: DEBUG oslo_vmware.api [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Waiting for the task: (returnval){ [ 1840.886223] env[62684]: value = "task-2052627" [ 1840.886223] env[62684]: _type = "Task" [ 1840.886223] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.886914] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfcda608-cd9c-41aa-95da-55db8808920f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.896830] env[62684]: DEBUG oslo_vmware.api [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Task: {'id': task-2052627, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.985032] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 26303c0e-be87-41ff-a15c-e92f91f8a05f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1841.052975] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1841.129608] env[62684]: DEBUG oslo_vmware.api [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052625, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.136625] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Acquiring lock "b788c51b-367b-4eef-93d2-faa8836469b6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1841.136881] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Lock "b788c51b-367b-4eef-93d2-faa8836469b6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.137122] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Acquiring lock "b788c51b-367b-4eef-93d2-faa8836469b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1841.137307] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Lock "b788c51b-367b-4eef-93d2-faa8836469b6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.137478] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Lock "b788c51b-367b-4eef-93d2-faa8836469b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.139702] env[62684]: INFO nova.compute.manager [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Terminating instance [ 1841.141453] env[62684]: DEBUG nova.compute.manager [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1841.141706] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1841.142535] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d767b15-3b7c-49f9-8208-2a52cd9a2887 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.149924] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1841.151047] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a5315032-38c1-4556-8463-6373427bf3e1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.157399] env[62684]: DEBUG oslo_vmware.api [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Waiting for the task: (returnval){ [ 1841.157399] env[62684]: value = "task-2052628" [ 1841.157399] env[62684]: _type = "Task" [ 1841.157399] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.165788] env[62684]: DEBUG oslo_vmware.api [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052628, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.230478] env[62684]: DEBUG oslo_vmware.api [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052626, 'name': Rename_Task, 'duration_secs': 0.402003} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.231893] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1841.231893] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ecb44994-71d1-4edf-b4ba-dfc341462880 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.238727] env[62684]: DEBUG oslo_vmware.api [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Waiting for the task: (returnval){ [ 1841.238727] env[62684]: value = "task-2052629" [ 1841.238727] env[62684]: _type = "Task" [ 1841.238727] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.248221] env[62684]: DEBUG oslo_vmware.api [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052629, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.399358] env[62684]: DEBUG oslo_vmware.api [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Task: {'id': task-2052627, 'name': ReconfigVM_Task, 'duration_secs': 0.283247} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.399712] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Reconfigured VM instance instance-00000022 to attach disk [datastore1] d06f3099-d05f-417f-a71a-7b368590624f/d06f3099-d05f-417f-a71a-7b368590624f.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1841.400364] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f76112fe-871c-4bf2-8db9-dcfc70608b77 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.409011] env[62684]: DEBUG oslo_vmware.api [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Waiting for the task: (returnval){ [ 1841.409011] env[62684]: value = "task-2052630" [ 1841.409011] env[62684]: _type = "Task" [ 1841.409011] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.417611] env[62684]: DEBUG oslo_vmware.api [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Task: {'id': task-2052630, 'name': Rename_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.490897] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance ca3d1a73-6f3b-4278-8fe7-03b66f407ba6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1841.631883] env[62684]: DEBUG oslo_vmware.api [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052625, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.666775] env[62684]: DEBUG oslo_vmware.api [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052628, 'name': PowerOffVM_Task, 'duration_secs': 0.360767} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.667074] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1841.667257] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1841.667561] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c2948f6f-99b1-41a1-8c11-f26803ea0f25 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.749692] env[62684]: DEBUG oslo_vmware.api [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052629, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.835679] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1841.835918] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1841.836141] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Deleting the datastore file [datastore1] b788c51b-367b-4eef-93d2-faa8836469b6 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1841.836468] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f3981a46-4e92-4813-9e38-abbc6b2ed946 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.843285] env[62684]: DEBUG oslo_vmware.api [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Waiting for the task: (returnval){ [ 1841.843285] env[62684]: value = "task-2052632" [ 1841.843285] env[62684]: _type = "Task" [ 1841.843285] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.853298] env[62684]: DEBUG oslo_vmware.api [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052632, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.919147] env[62684]: DEBUG oslo_vmware.api [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Task: {'id': task-2052630, 'name': Rename_Task, 'duration_secs': 0.233447} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.919537] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1841.919835] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c3a1845c-b81a-4827-9f1c-eab36d67080b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.926266] env[62684]: DEBUG oslo_vmware.api [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Waiting for the task: (returnval){ [ 1841.926266] env[62684]: value = "task-2052633" [ 1841.926266] env[62684]: _type = "Task" [ 1841.926266] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.937723] env[62684]: DEBUG oslo_vmware.api [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Task: {'id': task-2052633, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.994941] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 02dc8c41-5092-4f84-9722-37d4df3a459a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1841.994941] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 20 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1841.995162] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4416MB phys_disk=200GB used_disk=19GB total_vcpus=48 used_vcpus=20 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1842.130185] env[62684]: DEBUG oslo_vmware.api [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052625, 'name': CreateSnapshot_Task, 'duration_secs': 1.183402} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.130488] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Created Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1842.131433] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32852e94-204f-4f6d-b825-ec8026379f7e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.251498] env[62684]: DEBUG oslo_vmware.api [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052629, 'name': PowerOnVM_Task} progress is 82%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.358628] env[62684]: DEBUG oslo_vmware.api [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Task: {'id': task-2052632, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.23574} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.358923] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1842.360188] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1842.360459] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1842.360660] env[62684]: INFO nova.compute.manager [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1842.360971] env[62684]: DEBUG oslo.service.loopingcall [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1842.361527] env[62684]: DEBUG nova.compute.manager [-] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1842.361527] env[62684]: DEBUG nova.network.neutron [-] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1842.440389] env[62684]: DEBUG oslo_vmware.api [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Task: {'id': task-2052633, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.568050] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb0e08b5-cee8-4fc4-9088-6e616e266685 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.575433] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e30c033-f103-4652-8cdb-4e3452db1ff5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.612598] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb228fa9-a8cb-4d1f-8697-e8b8724d1e8e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.619892] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a31a4d-9339-4de7-b159-018f1b698441 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.635423] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1842.639934] env[62684]: DEBUG nova.compute.manager [req-748cc332-8f72-4a37-8871-ccd6155886fa req-511baef3-7188-4f6c-9cd8-17924861b130 service nova] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Received event network-vif-deleted-6f9879b5-4fa4-4afb-a3a5-8ee16a495f70 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1842.639934] env[62684]: INFO nova.compute.manager [req-748cc332-8f72-4a37-8871-ccd6155886fa req-511baef3-7188-4f6c-9cd8-17924861b130 service nova] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Neutron deleted interface 6f9879b5-4fa4-4afb-a3a5-8ee16a495f70; detaching it from the instance and deleting it from the info cache [ 1842.639934] env[62684]: DEBUG nova.network.neutron [req-748cc332-8f72-4a37-8871-ccd6155886fa req-511baef3-7188-4f6c-9cd8-17924861b130 service nova] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1842.652488] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: 
ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Creating linked-clone VM from snapshot {{(pid=62684) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1842.653554] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a54985d1-a5f2-4fc6-babb-ea58a36504c7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.661541] env[62684]: DEBUG oslo_vmware.api [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1842.661541] env[62684]: value = "task-2052634" [ 1842.661541] env[62684]: _type = "Task" [ 1842.661541] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.671561] env[62684]: DEBUG oslo_vmware.api [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052634, 'name': CloneVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.722052] env[62684]: DEBUG nova.compute.manager [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1842.723338] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de1f2fc4-6139-4427-81a8-573b8a59fbe5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.751807] env[62684]: DEBUG oslo_vmware.api [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052629, 'name': PowerOnVM_Task, 'duration_secs': 1.056021} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.752147] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1842.752450] env[62684]: INFO nova.compute.manager [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Took 6.34 seconds to spawn the instance on the hypervisor. 
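
The terminate sequence logged above for instance b788c51b-367b-4eef-93d2-faa8836469b6 starts by taking a lock named after the instance UUID ("Acquiring lock ... by ... do_terminate_instance") before any work is done on the hypervisor. The sketch below is a minimal, hypothetical illustration of that per-instance locking pattern using oslo.concurrency directly; it is not Nova's actual ComputeManager code, and the function names are made up for the example.

    # Minimal sketch (assumes oslo.concurrency is installed); not Nova's code.
    from oslo_concurrency import lockutils

    def terminate_instance(instance_uuid):
        # The decorator emits the "Acquiring lock ..." / 'Lock "..." acquired
        # ... :: waited N.NNNs' DEBUG lines seen above and serializes every
        # caller that synchronizes on the same UUID-derived lock name.
        @lockutils.synchronized(instance_uuid)
        def do_terminate_instance():
            print('destroying %s while holding its per-instance lock'
                  % instance_uuid)

        do_terminate_instance()

    if __name__ == '__main__':
        terminate_instance('b788c51b-367b-4eef-93d2-faa8836469b6')

By default the lock is an in-process semaphore; passing external=True (together with a lock_path) would back it with a file lock shared across processes.
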
[ 1842.752719] env[62684]: DEBUG nova.compute.manager [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1842.753848] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a89690e0-d742-4def-bc5c-e12dfda31ea9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.938926] env[62684]: DEBUG oslo_vmware.api [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Task: {'id': task-2052633, 'name': PowerOnVM_Task, 'duration_secs': 0.598051} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.939340] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1842.939645] env[62684]: INFO nova.compute.manager [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Took 9.22 seconds to spawn the instance on the hypervisor. [ 1842.939877] env[62684]: DEBUG nova.compute.manager [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1842.940762] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b5b4a1-a364-43f4-8109-ae259367d676 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.117780] env[62684]: DEBUG nova.network.neutron [-] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1843.138499] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1843.141783] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a8591447-b788-4921-aaf4-faca79fb97c7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.151475] env[62684]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c101cc-9b46-4bea-8d35-2f251cdfc8e3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.171330] env[62684]: DEBUG oslo_vmware.api [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052634, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.186622] env[62684]: DEBUG nova.compute.manager [req-748cc332-8f72-4a37-8871-ccd6155886fa req-511baef3-7188-4f6c-9cd8-17924861b130 service nova] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Detach interface failed, port_id=6f9879b5-4fa4-4afb-a3a5-8ee16a495f70, reason: Instance b788c51b-367b-4eef-93d2-faa8836469b6 could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1843.236995] env[62684]: INFO nova.compute.manager [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] instance snapshotting [ 1843.237254] env[62684]: WARNING nova.compute.manager [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 1843.240226] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed73f795-4441-4c35-9507-e72c57944e22 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.259384] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb5cd528-52b4-47c9-bb64-111d3e7de0d3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.274837] env[62684]: INFO nova.compute.manager [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Took 39.43 seconds to build instance. [ 1843.461050] env[62684]: INFO nova.compute.manager [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Took 45.48 seconds to build instance. [ 1843.620521] env[62684]: INFO nova.compute.manager [-] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Took 1.26 seconds to deallocate network for instance. 
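
Most of the vCenter work in these entries follows one pattern: invoke a vSphere method that returns a Task managed object, then poll it with the session's wait_for_task(), which is what produces the "Waiting for the task ... to complete" and "progress is N%" lines. The sketch below illustrates that pattern for the power-off / unregister / delete-datastore-file sequence that completed above for b788c51b-367b-4eef-93d2-faa8836469b6. It assumes an oslo_vmware.api.VMwareAPISession created elsewhere, plus managed-object references (vm_ref, file_manager, datacenter) and a datastore path supplied by the caller; the helper name and argument list are illustrative, not Nova's.

    # Sketch of the oslo.vmware invoke-then-wait pattern; not Nova's code.
    def power_off_and_remove(session, vm_ref, file_manager, datacenter, ds_path):
        # PowerOffVM_Task returns a Task reference immediately; wait_for_task()
        # polls it (emitting the progress DEBUG lines) and raises if the task
        # ends in an error state.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)

        # UnregisterVM is a plain call, not a task, so there is nothing to poll.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

        # DeleteDatastoreFile_Task removes the instance directory, mirroring the
        # "Deleting the datastore file [datastore1] ..." entries above.
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager,
                                  name=ds_path, datacenter=datacenter)
        session.wait_for_task(task)

On success wait_for_task() returns the completed task info; a task that fails is surfaced as an oslo.vmware exception rather than a return value.
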
[ 1843.646775] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1843.646956] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.278s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.647289] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.403s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1843.648121] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.649732] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5304a8c6-4bc7-4303-9b48-aa2fdb45bd63 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.238s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1843.649928] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5304a8c6-4bc7-4303-9b48-aa2fdb45bd63 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.651783] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.404s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1843.653395] env[62684]: INFO nova.compute.claims [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1843.656216] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1843.656380] env[62684]: DEBUG nova.compute.manager [None 
req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Cleaning up deleted instances {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 1843.673561] env[62684]: DEBUG oslo_vmware.api [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052634, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.690441] env[62684]: INFO nova.scheduler.client.report [None req-5304a8c6-4bc7-4303-9b48-aa2fdb45bd63 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Deleted allocations for instance ca22ca59-1b60-46f0-ae83-03ed4002fa0d [ 1843.692473] env[62684]: INFO nova.scheduler.client.report [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Deleted allocations for instance 17d30180-9770-4329-a6d8-757a93514a96 [ 1843.778252] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Creating Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1843.778252] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe6f05f8-574d-40fd-bd44-a0b36d9c9cd6 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Lock "e2a9ab56-bde3-40b6-a214-19c77a9c6778" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.279s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.778252] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-620b093a-f686-4a59-aaaa-84374b9fbae5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.787902] env[62684]: DEBUG oslo_vmware.api [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1843.787902] env[62684]: value = "task-2052635" [ 1843.787902] env[62684]: _type = "Task" [ 1843.787902] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.805563] env[62684]: DEBUG oslo_vmware.api [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052635, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.962680] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cbe5a925-dc9d-44bd-a04b-553405e6d989 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Lock "d06f3099-d05f-417f-a71a-7b368590624f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.545s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.127299] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1844.165104] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] There are 10 instances to clean {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 1844.165426] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: a4767855-0c1d-48c8-98cc-6532ff140b5c] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1844.178440] env[62684]: DEBUG oslo_vmware.api [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052634, 'name': CloneVM_Task, 'duration_secs': 1.493937} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.178725] env[62684]: INFO nova.virt.vmwareapi.vmops [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Created linked-clone VM from snapshot [ 1844.179544] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0abffd8f-7206-43af-bd78-5e3491fb6a1b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.187148] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Uploading image b28939e6-697c-4d7e-b45d-fc69ecb176b1 {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1844.201073] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5304a8c6-4bc7-4303-9b48-aa2fdb45bd63 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Lock "ca22ca59-1b60-46f0-ae83-03ed4002fa0d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.815s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.206365] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f6d61b2e-b4ea-41c4-a3ce-d05ef53dfd04 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818 tempest-FloatingIPsAssociationNegativeTestJSON-1059335818-project-member] Lock "17d30180-9770-4329-a6d8-757a93514a96" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.477s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.214553] env[62684]: DEBUG oslo_vmware.rw_handles [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1844.214553] env[62684]: value = "vm-421224" [ 1844.214553] env[62684]: _type = "VirtualMachine" [ 1844.214553] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1844.215068] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-c7ebb7cd-2b85-4a1b-bafe-5d248a63f55d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.225596] env[62684]: DEBUG oslo_vmware.rw_handles [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Lease: (returnval){ [ 1844.225596] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529d4a3c-d3a6-f9c7-8f90-79b4a45a6119" [ 1844.225596] env[62684]: _type = "HttpNfcLease" [ 1844.225596] env[62684]: } obtained for exporting VM: (result){ [ 1844.225596] env[62684]: value = "vm-421224" [ 1844.225596] env[62684]: _type = "VirtualMachine" [ 1844.225596] env[62684]: }. 
{{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1844.225863] env[62684]: DEBUG oslo_vmware.api [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the lease: (returnval){ [ 1844.225863] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529d4a3c-d3a6-f9c7-8f90-79b4a45a6119" [ 1844.225863] env[62684]: _type = "HttpNfcLease" [ 1844.225863] env[62684]: } to be ready. {{(pid=62684) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1844.233198] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1844.233198] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529d4a3c-d3a6-f9c7-8f90-79b4a45a6119" [ 1844.233198] env[62684]: _type = "HttpNfcLease" [ 1844.233198] env[62684]: } is initializing. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1844.243065] env[62684]: DEBUG nova.compute.manager [None req-be58f108-6627-40aa-8ee4-53cc42d1cda5 tempest-ServerExternalEventsTest-770296000 tempest-ServerExternalEventsTest-770296000-project] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Received event network-changed {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1844.243284] env[62684]: DEBUG nova.compute.manager [None req-be58f108-6627-40aa-8ee4-53cc42d1cda5 tempest-ServerExternalEventsTest-770296000 tempest-ServerExternalEventsTest-770296000-project] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Refreshing instance network info cache due to event network-changed. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1844.243523] env[62684]: DEBUG oslo_concurrency.lockutils [None req-be58f108-6627-40aa-8ee4-53cc42d1cda5 tempest-ServerExternalEventsTest-770296000 tempest-ServerExternalEventsTest-770296000-project] Acquiring lock "refresh_cache-d06f3099-d05f-417f-a71a-7b368590624f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1844.243670] env[62684]: DEBUG oslo_concurrency.lockutils [None req-be58f108-6627-40aa-8ee4-53cc42d1cda5 tempest-ServerExternalEventsTest-770296000 tempest-ServerExternalEventsTest-770296000-project] Acquired lock "refresh_cache-d06f3099-d05f-417f-a71a-7b368590624f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1844.243832] env[62684]: DEBUG nova.network.neutron [None req-be58f108-6627-40aa-8ee4-53cc42d1cda5 tempest-ServerExternalEventsTest-770296000 tempest-ServerExternalEventsTest-770296000-project] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1844.264569] env[62684]: INFO nova.compute.manager [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Rebuilding instance [ 1844.282651] env[62684]: DEBUG nova.compute.manager [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1844.298883] env[62684]: DEBUG oslo_vmware.api [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052635, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.307229] env[62684]: DEBUG nova.compute.manager [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1844.308139] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f018d71c-18b8-4313-915a-84d282fed0e4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.465785] env[62684]: DEBUG nova.compute.manager [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1844.671199] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: aec16a15-5d75-4ea6-800b-1bf67f762d89] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1844.738234] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1844.738234] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529d4a3c-d3a6-f9c7-8f90-79b4a45a6119" [ 1844.738234] env[62684]: _type = "HttpNfcLease" [ 1844.738234] env[62684]: } is ready. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1844.738680] env[62684]: DEBUG oslo_vmware.rw_handles [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1844.738680] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529d4a3c-d3a6-f9c7-8f90-79b4a45a6119" [ 1844.738680] env[62684]: _type = "HttpNfcLease" [ 1844.738680] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1844.739475] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f405e57-8849-461a-ac1e-f1c922ba7c90 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.752134] env[62684]: DEBUG oslo_vmware.rw_handles [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5254c8fb-7a8e-143a-1aa0-1e96e15ebb94/disk-0.vmdk from lease info. 
{{(pid=62684) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1844.752471] env[62684]: DEBUG oslo_vmware.rw_handles [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5254c8fb-7a8e-143a-1aa0-1e96e15ebb94/disk-0.vmdk for reading. {{(pid=62684) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1844.823093] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Acquiring lock "d06f3099-d05f-417f-a71a-7b368590624f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1844.823349] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Lock "d06f3099-d05f-417f-a71a-7b368590624f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1844.823618] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Acquiring lock "d06f3099-d05f-417f-a71a-7b368590624f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1844.823813] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Lock "d06f3099-d05f-417f-a71a-7b368590624f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1844.823995] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Lock "d06f3099-d05f-417f-a71a-7b368590624f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.830239] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1844.831340] env[62684]: INFO nova.compute.manager [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Terminating instance [ 1844.832996] env[62684]: 
DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-234d282e-e709-4acc-a63a-3613ac01e282 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.835954] env[62684]: DEBUG nova.compute.manager [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1844.836205] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1844.837136] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98e829b9-b8d3-4f13-b7d9-ca35e4975749 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.845069] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Waiting for the task: (returnval){ [ 1844.845069] env[62684]: value = "task-2052637" [ 1844.845069] env[62684]: _type = "Task" [ 1844.845069] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.845069] env[62684]: DEBUG oslo_vmware.api [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052635, 'name': CreateSnapshot_Task, 'duration_secs': 0.988693} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.846106] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1844.847104] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Created Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1844.853891] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-201f7799-4455-4b38-b9af-8efb52516a22 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.859486] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1844.860432] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b8679993-89df-4e0e-af87-d09652b0fe71 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.870403] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052637, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.875852] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c10edd4e-cf29-409a-ad60-7dd3a09580d9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.881141] env[62684]: DEBUG oslo_vmware.api [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Waiting for the task: (returnval){ [ 1844.881141] env[62684]: value = "task-2052638" [ 1844.881141] env[62684]: _type = "Task" [ 1844.881141] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.890647] env[62684]: DEBUG oslo_vmware.api [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Task: {'id': task-2052638, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.986928] env[62684]: DEBUG oslo_concurrency.lockutils [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1845.003959] env[62684]: DEBUG oslo_concurrency.lockutils [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Acquiring lock "fb7f38a0-bcfa-4d96-bde3-20d6f1d70112" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1845.004333] env[62684]: DEBUG oslo_concurrency.lockutils [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Lock "fb7f38a0-bcfa-4d96-bde3-20d6f1d70112" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1845.004581] env[62684]: DEBUG oslo_concurrency.lockutils [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Acquiring lock "fb7f38a0-bcfa-4d96-bde3-20d6f1d70112-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1845.004781] env[62684]: DEBUG oslo_concurrency.lockutils [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Lock "fb7f38a0-bcfa-4d96-bde3-20d6f1d70112-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1845.004949] env[62684]: DEBUG oslo_concurrency.lockutils [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Lock "fb7f38a0-bcfa-4d96-bde3-20d6f1d70112-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1845.010244] env[62684]: INFO nova.compute.manager [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Terminating instance [ 1845.012295] env[62684]: DEBUG nova.compute.manager [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1845.012484] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1845.013328] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95043f34-447d-490f-be1f-677997f2e890 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.021769] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1845.022040] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-85e529e7-f840-4099-a1fb-193c448a9983 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.028360] env[62684]: DEBUG oslo_vmware.api [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Waiting for the task: (returnval){ [ 1845.028360] env[62684]: value = "task-2052639" [ 1845.028360] env[62684]: _type = "Task" [ 1845.028360] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.041830] env[62684]: DEBUG oslo_vmware.api [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052639, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.179083] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 43d28811-26e4-4016-9f82-98349d4a05b7] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1845.183973] env[62684]: DEBUG nova.network.neutron [None req-be58f108-6627-40aa-8ee4-53cc42d1cda5 tempest-ServerExternalEventsTest-770296000 tempest-ServerExternalEventsTest-770296000-project] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Updating instance_info_cache with network_info: [{"id": "f99cf594-91e3-4f98-85ce-def4475f0620", "address": "fa:16:3e:7e:2e:12", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.199", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf99cf594-91", "ovs_interfaceid": "f99cf594-91e3-4f98-85ce-def4475f0620", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1845.361867] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052637, 'name': PowerOffVM_Task, 'duration_secs': 0.133558} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.361867] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1845.361867] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1845.362420] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f713a94a-54e0-47bf-8e9f-4f26bcd0d4a6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.369451] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1845.370639] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f1115fa-3e0b-4ccd-b9f8-9c0e740b1844 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.372782] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-459e5047-b778-41d1-a8b9-488f639ae41b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.390832] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Creating linked-clone VM from snapshot {{(pid=62684) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1845.391975] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-68ef1c6c-035c-4623-ba0f-214ee759499e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.403432] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75077bb1-b69f-4d0f-8d94-545b67fe0ea0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.412309] env[62684]: DEBUG oslo_vmware.api [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Task: {'id': task-2052638, 'name': PowerOffVM_Task, 'duration_secs': 0.194284} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.412784] env[62684]: DEBUG oslo_vmware.api [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1845.412784] env[62684]: value = "task-2052641" [ 1845.412784] env[62684]: _type = "Task" [ 1845.412784] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.413726] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1845.414055] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1845.414562] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-184e73ff-c654-4140-bc9a-d0a81bbf12ac {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.449876] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99bd6c4f-b0b6-4c63-8cc0-ad305f6daab3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.452965] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1845.453738] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1845.454209] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Deleting the datastore file [datastore2] e2a9ab56-bde3-40b6-a214-19c77a9c6778 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1845.458190] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca7d9ee4-3f4d-44eb-b2ad-66cc76460f4a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.460230] env[62684]: DEBUG oslo_vmware.api [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052641, 'name': CloneVM_Task} progress is 16%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.465726] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d39ced28-8ded-4330-b501-a74d4720b7fe {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.471408] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Waiting for the task: (returnval){ [ 1845.471408] env[62684]: value = "task-2052643" [ 1845.471408] env[62684]: _type = "Task" [ 1845.471408] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.484701] env[62684]: DEBUG nova.compute.provider_tree [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1845.492261] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052643, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.541057] env[62684]: DEBUG oslo_vmware.api [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052639, 'name': PowerOffVM_Task, 'duration_secs': 0.191593} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.541414] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1845.541705] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1845.543160] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-184acab3-c9f4-4110-8634-1e91bd5eb03d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.554897] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1845.555404] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1845.555630] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Deleting the datastore file [datastore1] d06f3099-d05f-417f-a71a-7b368590624f {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1845.556064] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6fbdbfd9-61fc-4d24-b08e-0e67c3794392 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.564206] env[62684]: DEBUG oslo_vmware.api [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Waiting for the task: (returnval){ [ 1845.564206] env[62684]: value = "task-2052645" [ 1845.564206] env[62684]: _type = "Task" [ 1845.564206] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.574933] env[62684]: DEBUG oslo_vmware.api [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Task: {'id': task-2052645, 'name': DeleteDatastoreFile_Task} progress is 0%. 
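After power-off, each instance above is unregistered and its datastore directory removed through FileManager.DeleteDatastoreFile_Task. A rough sketch of that call sequence with oslo.vmware follows; it assumes a `session` and `vm_ref` like the ones in the earlier sketch, and the datacenter MoRef and datastore path are placeholders.

    # Rough sketch of the unregister-and-delete sequence logged above.
    # Assumes `session`/`vm_ref` as in the earlier PowerOffVM_Task sketch;
    # the datacenter MoRef and datastore path below are placeholders.
    from oslo_vmware import vim_util

    dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')    # placeholder

    # UnregisterVM is synchronous (no task to poll).
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Deleting the backing files returns a task that is polled to completion,
    # matching the DeleteDatastoreFile_Task records above.
    file_mgr = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_mgr,
        name='[datastore1] <instance-uuid>',                     # placeholder path
        datacenter=dc_ref)
    session.wait_for_task(task)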
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.654486] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1845.654827] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1845.655151] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Deleting the datastore file [datastore1] fb7f38a0-bcfa-4d96-bde3-20d6f1d70112 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1845.655504] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f340ed2-999c-4077-8dd9-e339fa7bff2e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.662169] env[62684]: DEBUG oslo_vmware.api [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Waiting for the task: (returnval){ [ 1845.662169] env[62684]: value = "task-2052646" [ 1845.662169] env[62684]: _type = "Task" [ 1845.662169] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.674138] env[62684]: DEBUG oslo_vmware.api [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052646, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.683238] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: e1540aa6-12a4-4cff-a444-d47ee66c78d7] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1845.687790] env[62684]: DEBUG oslo_concurrency.lockutils [None req-be58f108-6627-40aa-8ee4-53cc42d1cda5 tempest-ServerExternalEventsTest-770296000 tempest-ServerExternalEventsTest-770296000-project] Releasing lock "refresh_cache-d06f3099-d05f-417f-a71a-7b368590624f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1845.928460] env[62684]: DEBUG oslo_vmware.api [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052641, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.983685] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052643, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138183} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.984036] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1845.985223] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1845.985223] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1845.989638] env[62684]: DEBUG nova.scheduler.client.report [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1846.077267] env[62684]: DEBUG oslo_vmware.api [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Task: {'id': task-2052645, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155869} completed successfully. 
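For reference, Placement turns an inventory dictionary like the one logged above into schedulable capacity as (total - reserved) * allocation_ratio; the arithmetic below simply re-derives those figures from the logged values.

    # Capacity implied by the inventory logged above:
    #   capacity = (total - reserved) * allocation_ratio
    vcpu_capacity   = (48 - 0) * 4.0        # 192 schedulable vCPUs
    memory_capacity = (196590 - 512) * 1.0  # 196078 MB of RAM
    disk_capacity   = (400 - 0) * 1.0       # 400 GB of disk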
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.077850] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1846.078355] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1846.078642] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1846.079042] env[62684]: INFO nova.compute.manager [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1846.079329] env[62684]: DEBUG oslo.service.loopingcall [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1846.079696] env[62684]: DEBUG nova.compute.manager [-] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1846.079845] env[62684]: DEBUG nova.network.neutron [-] [instance: d06f3099-d05f-417f-a71a-7b368590624f] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1846.172908] env[62684]: DEBUG oslo_vmware.api [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Task: {'id': task-2052646, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.182991} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.173322] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1846.173623] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1846.173872] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1846.174108] env[62684]: INFO nova.compute.manager [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1846.174457] env[62684]: DEBUG oslo.service.loopingcall [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1846.174744] env[62684]: DEBUG nova.compute.manager [-] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1846.174936] env[62684]: DEBUG nova.network.neutron [-] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1846.189908] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 91869c00-edd0-40a8-84df-d8842d750558] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1846.426471] env[62684]: DEBUG oslo_vmware.api [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052641, 'name': CloneVM_Task} progress is 94%. 
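The "Waiting for function …_deallocate_network_with_retries to return" records come from oslo.service looping calls. Below is a minimal sketch of that pattern; the interval and the trivial body are illustrative only, not Nova's actual retry policy.

    # Minimal looping-call sketch; the interval and body are illustrative.
    from oslo_service import loopingcall

    def _deallocate_with_retries():
        # Real code would retry on failure; raising LoopingCallDone stops the
        # loop and hands its value back to start().wait().
        raise loopingcall.LoopingCallDone(True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    result = timer.start(interval=1).wait()   # blocks until LoopingCallDone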
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.464935] env[62684]: DEBUG nova.compute.manager [req-674e5a38-f88b-456c-bfb7-ad83ae3a088a req-aa8562f3-79be-41b3-9a9f-cf4feb5b1d26 service nova] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Received event network-vif-deleted-eab61e5f-3e13-43bd-8778-1f31e15ef593 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1846.465207] env[62684]: INFO nova.compute.manager [req-674e5a38-f88b-456c-bfb7-ad83ae3a088a req-aa8562f3-79be-41b3-9a9f-cf4feb5b1d26 service nova] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Neutron deleted interface eab61e5f-3e13-43bd-8778-1f31e15ef593; detaching it from the instance and deleting it from the info cache [ 1846.465325] env[62684]: DEBUG nova.network.neutron [req-674e5a38-f88b-456c-bfb7-ad83ae3a088a req-aa8562f3-79be-41b3-9a9f-cf4feb5b1d26 service nova] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1846.498393] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.846s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1846.499679] env[62684]: DEBUG nova.compute.manager [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1846.502979] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.646s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1846.504141] env[62684]: INFO nova.compute.claims [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1846.522125] env[62684]: DEBUG nova.compute.manager [req-2b1e6a68-50a2-48b7-abff-8d9e99efe0c2 req-5e5ba3b3-4362-4816-bd3d-8fcb9fa94e97 service nova] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Received event network-vif-deleted-f99cf594-91e3-4f98-85ce-def4475f0620 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1846.522125] env[62684]: INFO nova.compute.manager [req-2b1e6a68-50a2-48b7-abff-8d9e99efe0c2 req-5e5ba3b3-4362-4816-bd3d-8fcb9fa94e97 service nova] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Neutron deleted interface f99cf594-91e3-4f98-85ce-def4475f0620; detaching it from the instance and deleting it from the info cache [ 1846.522125] env[62684]: DEBUG nova.network.neutron [req-2b1e6a68-50a2-48b7-abff-8d9e99efe0c2 req-5e5ba3b3-4362-4816-bd3d-8fcb9fa94e97 service nova] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1846.691177] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 8d53d8c3-6db8-4ebe-a35f-0f64602fafcb] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1846.929249] env[62684]: DEBUG oslo_vmware.api [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052641, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.931805] env[62684]: DEBUG nova.network.neutron [-] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1846.968745] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9398d23f-5c19-4757-988d-f54d9c3da4d4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.978672] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f9b8b92-1fae-443e-b186-9a48acceca4c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.003048] env[62684]: DEBUG nova.network.neutron [-] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1847.014390] env[62684]: DEBUG nova.compute.utils [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1847.029403] env[62684]: DEBUG nova.compute.manager [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1847.029672] env[62684]: DEBUG nova.network.neutron [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1847.032874] env[62684]: DEBUG nova.compute.manager [req-674e5a38-f88b-456c-bfb7-ad83ae3a088a req-aa8562f3-79be-41b3-9a9f-cf4feb5b1d26 service nova] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Detach interface failed, port_id=eab61e5f-3e13-43bd-8778-1f31e15ef593, reason: Instance fb7f38a0-bcfa-4d96-bde3-20d6f1d70112 could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1847.033679] env[62684]: DEBUG nova.compute.manager [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1847.037891] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8be63827-7410-4d1c-8132-16390dfc58cb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.047993] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e1cb327-c4a9-4cff-9c1c-1b49182b594d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.060728] env[62684]: DEBUG nova.virt.hardware [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1847.064562] env[62684]: DEBUG nova.virt.hardware [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1847.064562] env[62684]: DEBUG nova.virt.hardware [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1847.064562] env[62684]: DEBUG nova.virt.hardware [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1847.064562] env[62684]: DEBUG nova.virt.hardware [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1847.064562] env[62684]: DEBUG nova.virt.hardware [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1847.064562] env[62684]: DEBUG nova.virt.hardware [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1847.065041] env[62684]: DEBUG nova.virt.hardware [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1847.065041] env[62684]: DEBUG nova.virt.hardware [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1847.065041] env[62684]: DEBUG nova.virt.hardware [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1847.065041] env[62684]: DEBUG nova.virt.hardware [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1847.065041] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0bbb720-d85e-4be5-a3bb-7300d3968332 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.091675] env[62684]: DEBUG nova.compute.manager [req-2b1e6a68-50a2-48b7-abff-8d9e99efe0c2 req-5e5ba3b3-4362-4816-bd3d-8fcb9fa94e97 service nova] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Detach interface failed, port_id=f99cf594-91e3-4f98-85ce-def4475f0620, reason: Instance d06f3099-d05f-417f-a71a-7b368590624f could not be found. 
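The nova.virt.hardware lines above reduce to a small search: with one vCPU and effectively unbounded socket/core/thread limits, the only factorisation is 1x1x1. The toy enumeration below mirrors that result and ignores the real limit/preference handling in nova.virt.hardware.

    # Toy enumeration of (sockets, cores, threads) candidates for a vCPU count,
    # ignoring the flavor/image limits that nova.virt.hardware also applies.
    def possible_topologies(vcpus):
        return [(s, c, t)
                for s in range(1, vcpus + 1)
                for c in range(1, vcpus + 1)
                for t in range(1, vcpus + 1)
                if s * c * t == vcpus]

    print(possible_topologies(1))   # [(1, 1, 1)] -- matches the log above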
{{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1847.095272] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebeed2d3-02fc-427b-8e8f-4e13333ef4a7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.101358] env[62684]: DEBUG nova.policy [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4f9ace4d78b94a3db9eb74236fca1e6a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aef5d7061c834332b9f9c5c75596bf08', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1847.116201] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Instance VIF info [] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1847.122239] env[62684]: DEBUG oslo.service.loopingcall [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1847.122611] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1847.122900] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5d64ac71-1cec-4791-8a32-f51b7721ec4b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.141264] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1847.141264] env[62684]: value = "task-2052647" [ 1847.141264] env[62684]: _type = "Task" [ 1847.141264] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.150263] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052647, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.196018] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 4cbcfa1a-c034-4de7-ad25-4ad22316067e] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1847.426373] env[62684]: DEBUG oslo_vmware.api [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052641, 'name': CloneVM_Task, 'duration_secs': 1.538608} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.426756] env[62684]: INFO nova.virt.vmwareapi.vmops [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Created linked-clone VM from snapshot [ 1847.427548] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8c79121-61ec-4e47-8521-12da1c3f8dec {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.436231] env[62684]: INFO nova.compute.manager [-] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Took 1.26 seconds to deallocate network for instance. [ 1847.436628] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Uploading image 08ff9f3e-529a-4533-8ecd-7c43601c17d3 {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1847.445097] env[62684]: DEBUG nova.network.neutron [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Successfully created port: 1b1935d6-812b-4914-a0c8-8cfc6f58c715 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1847.461077] env[62684]: DEBUG oslo_vmware.rw_handles [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1847.461077] env[62684]: value = "vm-421226" [ 1847.461077] env[62684]: _type = "VirtualMachine" [ 1847.461077] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1847.461388] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-42cffaab-4ece-4012-acca-c1154508a20d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.470109] env[62684]: DEBUG oslo_vmware.rw_handles [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lease: (returnval){ [ 1847.470109] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521e2a7f-4544-38a6-0403-67a1988dfa6c" [ 1847.470109] env[62684]: _type = "HttpNfcLease" [ 1847.470109] env[62684]: } obtained for exporting VM: (result){ [ 1847.470109] env[62684]: value = "vm-421226" [ 1847.470109] env[62684]: _type = "VirtualMachine" [ 1847.470109] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1847.470429] env[62684]: DEBUG oslo_vmware.api [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the lease: (returnval){ [ 1847.470429] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521e2a7f-4544-38a6-0403-67a1988dfa6c" [ 1847.470429] env[62684]: _type = "HttpNfcLease" [ 1847.470429] env[62684]: } to be ready. 
{{(pid=62684) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1847.482099] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1847.482099] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521e2a7f-4544-38a6-0403-67a1988dfa6c" [ 1847.482099] env[62684]: _type = "HttpNfcLease" [ 1847.482099] env[62684]: } is initializing. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1847.509734] env[62684]: INFO nova.compute.manager [-] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Took 1.43 seconds to deallocate network for instance. [ 1847.654368] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052647, 'name': CreateVM_Task, 'duration_secs': 0.347305} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.654521] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1847.654955] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1847.655126] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1847.655464] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1847.655726] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59f8f91f-6312-435a-a8e4-34fa964e69f6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.661326] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Waiting for the task: (returnval){ [ 1847.661326] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5282f0ad-b96f-e7bf-d622-024dce5a4f21" [ 1847.661326] env[62684]: _type = "Task" [ 1847.661326] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.673657] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5282f0ad-b96f-e7bf-d622-024dce5a4f21, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.674109] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1847.674247] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1847.674940] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1847.674940] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1847.675097] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1847.675525] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6399aa40-4f70-4f7c-9e61-cd0f2b8f7a51 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.687019] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1847.687220] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1847.688017] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f1aa167-2bb3-4fb6-8c68-c011757494c0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.696189] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Waiting for the task: (returnval){ [ 1847.696189] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c0a96c-9358-abc8-125d-f4e3420e315a" [ 1847.696189] env[62684]: _type = "Task" [ 1847.696189] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.699496] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: c1580c72-9345-436e-b4f7-56d319248864] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1847.706283] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c0a96c-9358-abc8-125d-f4e3420e315a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.948849] env[62684]: DEBUG oslo_concurrency.lockutils [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1847.982189] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1847.982189] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521e2a7f-4544-38a6-0403-67a1988dfa6c" [ 1847.982189] env[62684]: _type = "HttpNfcLease" [ 1847.982189] env[62684]: } is ready. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1847.982514] env[62684]: DEBUG oslo_vmware.rw_handles [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1847.982514] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521e2a7f-4544-38a6-0403-67a1988dfa6c" [ 1847.982514] env[62684]: _type = "HttpNfcLease" [ 1847.982514] env[62684]: }. 
{{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1847.983336] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a77aa88e-e18a-47c4-9691-252b1e14a3e1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.994672] env[62684]: DEBUG oslo_vmware.rw_handles [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5222ed62-0042-ab04-cbcc-f76946bdbc78/disk-0.vmdk from lease info. {{(pid=62684) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1847.995120] env[62684]: DEBUG oslo_vmware.rw_handles [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5222ed62-0042-ab04-cbcc-f76946bdbc78/disk-0.vmdk for reading. {{(pid=62684) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1848.050221] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1848.051090] env[62684]: DEBUG nova.compute.manager [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Start spawning the instance on the hypervisor. 
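The image-upload records above (ExportVm, lease "is initializing" then "is ready", VMDK URL discovered and opened for reading) map onto the HttpNfcLease export flow. A condensed sketch follows, again assuming a `session` and `vm_ref` as in the first sketch; lease progress reporting and the actual streamed read are omitted.

    # Condensed HttpNfcLease export sketch; assumes `session`/`vm_ref` from the
    # earlier sketch. Progress updates and the HTTP read itself are omitted.
    from oslo_vmware import vim_util

    lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
    session.wait_for_lease_ready(lease)            # "is initializing" -> "is ready"
    lease_info = session.invoke_api(vim_util, 'get_object_property',
                                    session.vim, lease, 'info')
    vmdk_urls = [d.url for d in lease_info.deviceUrl]   # e.g. .../disk-0.vmdk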
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1848.078566] env[62684]: DEBUG nova.virt.hardware [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1848.078834] env[62684]: DEBUG nova.virt.hardware [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1848.078992] env[62684]: DEBUG nova.virt.hardware [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1848.079194] env[62684]: DEBUG nova.virt.hardware [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1848.079343] env[62684]: DEBUG nova.virt.hardware [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1848.079501] env[62684]: DEBUG nova.virt.hardware [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1848.079721] env[62684]: DEBUG nova.virt.hardware [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1848.079885] env[62684]: DEBUG nova.virt.hardware [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1848.080065] env[62684]: DEBUG nova.virt.hardware [None 
req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1848.080706] env[62684]: DEBUG nova.virt.hardware [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1848.080706] env[62684]: DEBUG nova.virt.hardware [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1848.081301] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4203d50a-ee50-4339-9fcc-9b1902441309 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.088970] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d1cc11b9-ff70-46c3-a851-c617766851dc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.094484] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc0ac75-46d7-4f0a-bec9-fa24c836a9ea {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.102357] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88eb49fa-a3a0-40f3-96db-cd97c761e3cc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.119808] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e404b75c-8b7d-4b1d-bdec-b8126e16bd6d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.154826] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c101e0f8-d3f1-4f81-accf-b37d62002f57 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.163053] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb7a97f-91aa-4fee-8a3c-f77ba2598901 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.177133] env[62684]: DEBUG nova.compute.provider_tree [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1848.206530] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 0f9a525c-09b9-483e-b418-fea6e6e5dc4a] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1848.208836] 
env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c0a96c-9358-abc8-125d-f4e3420e315a, 'name': SearchDatastore_Task, 'duration_secs': 0.011956} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.209511] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-520d8d5d-56c4-44ef-8980-1b90d5d2de49 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.215252] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Waiting for the task: (returnval){ [ 1848.215252] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52cbb236-d7a3-9218-23fc-1f20ad019edf" [ 1848.215252] env[62684]: _type = "Task" [ 1848.215252] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.228216] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52cbb236-d7a3-9218-23fc-1f20ad019edf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.680561] env[62684]: DEBUG nova.scheduler.client.report [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1848.713375] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: e4528a29-163d-4f5e-9497-6e6b90b290ba] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1848.729369] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52cbb236-d7a3-9218-23fc-1f20ad019edf, 'name': SearchDatastore_Task, 'duration_secs': 0.010053} completed successfully. 
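Annotation: the nova.virt.hardware lines a little earlier walk through CPU topology selection for the 1-vCPU m1.nano flavor: flavor and image limits of 0:0:0 mean "unset", the maximums default to 65536, and the only factorisation of one vCPU is sockets=1, cores=1, threads=1. A compressed stand-in for that enumeration; the helper name and structure are hypothetical, not Nova's implementation.

```python
from itertools import product

# Hypothetical stand-in for the topology enumeration logged above: list every
# (sockets, cores, threads) split whose product equals the vCPU count and
# which stays within the given maximums.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    found = []
    for sockets, cores, threads in product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            found.append((sockets, cores, threads))
    return found

# For the m1.nano case in the log (1 vCPU, no explicit limits):
assert possible_topologies(1) == [(1, 1, 1)]
```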
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.729369] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1848.729369] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] e2a9ab56-bde3-40b6-a214-19c77a9c6778/e2a9ab56-bde3-40b6-a214-19c77a9c6778.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1848.729369] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-19e2f101-44e6-41e9-8dda-928c20697f06 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.738579] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Waiting for the task: (returnval){ [ 1848.738579] env[62684]: value = "task-2052649" [ 1848.738579] env[62684]: _type = "Task" [ 1848.738579] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.749801] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052649, 'name': CopyVirtualDisk_Task} progress is 0%. 
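Annotation: the "Copying Virtual Disk ... devstack-image-cache_base/... to ... e2a9ab56-..." step above is the cached-image copy into the instance directory, driven as a vCenter task and then polled (the repeated "progress is 0%" lines). A hedged sketch of driving that through an existing oslo.vmware session; the datacenter reference and keyword names follow the vSphere SDK's VirtualDiskManager.CopyVirtualDisk_Task and are assumptions here, not Nova's vm_util code.

```python
# Rough sketch (not Nova's vm_util): copy a cached image VMDK to the
# instance directory through an already-created oslo.vmware session.
def copy_cached_vmdk(session, dc_ref, source_path, dest_path):
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task',
                              disk_mgr,
                              sourceName=source_path,
                              sourceDatacenter=dc_ref,
                              destName=dest_path,
                              destDatacenter=dc_ref)
    # wait_for_task polls the task object and raises if vCenter reports an
    # error state; success corresponds to the "completed successfully" lines.
    session.wait_for_task(task)

# copy_cached_vmdk(session, dc_ref,
#     '[datastore2] devstack-image-cache_base/3931321c-.../3931321c-....vmdk',
#     '[datastore2] e2a9ab56-.../e2a9ab56-....vmdk')
```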
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.113497] env[62684]: DEBUG nova.network.neutron [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Successfully updated port: 1b1935d6-812b-4914-a0c8-8cfc6f58c715 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1849.185642] env[62684]: DEBUG nova.compute.manager [req-1d8fa60b-0cc3-4334-a8a8-0f08ededadda req-83bc57e9-0fe0-43df-a26d-b94e8370cafb service nova] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Received event network-vif-plugged-1b1935d6-812b-4914-a0c8-8cfc6f58c715 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1849.185924] env[62684]: DEBUG oslo_concurrency.lockutils [req-1d8fa60b-0cc3-4334-a8a8-0f08ededadda req-83bc57e9-0fe0-43df-a26d-b94e8370cafb service nova] Acquiring lock "4a15d298-115f-4132-8be0-00e623fa21d8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1849.186240] env[62684]: DEBUG oslo_concurrency.lockutils [req-1d8fa60b-0cc3-4334-a8a8-0f08ededadda req-83bc57e9-0fe0-43df-a26d-b94e8370cafb service nova] Lock "4a15d298-115f-4132-8be0-00e623fa21d8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1849.186378] env[62684]: DEBUG oslo_concurrency.lockutils [req-1d8fa60b-0cc3-4334-a8a8-0f08ededadda req-83bc57e9-0fe0-43df-a26d-b94e8370cafb service nova] Lock "4a15d298-115f-4132-8be0-00e623fa21d8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1849.186597] env[62684]: DEBUG nova.compute.manager [req-1d8fa60b-0cc3-4334-a8a8-0f08ededadda req-83bc57e9-0fe0-43df-a26d-b94e8370cafb service nova] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] No waiting events found dispatching network-vif-plugged-1b1935d6-812b-4914-a0c8-8cfc6f58c715 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1849.186796] env[62684]: WARNING nova.compute.manager [req-1d8fa60b-0cc3-4334-a8a8-0f08ededadda req-83bc57e9-0fe0-43df-a26d-b94e8370cafb service nova] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Received unexpected event network-vif-plugged-1b1935d6-812b-4914-a0c8-8cfc6f58c715 for instance with vm_state building and task_state spawning. [ 1849.187754] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.685s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1849.188357] env[62684]: DEBUG nova.compute.manager [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Start building networks asynchronously for instance. 
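Annotation: the network-vif-plugged handling above ends in a WARNING ("Received unexpected event ... for instance with vm_state building and task_state spawning") because Neutron delivered the event before the spawn path registered a waiter for it, so pop_instance_event found nothing to wake. A simplified illustration of that latch pattern; the class and method names are hypothetical, not Nova's InstanceEvents.

```python
# Simplified illustration of the "pop_instance_event" pattern in the log:
# a per-instance table of latches guarded by a lock. If the external event
# arrives before anyone registered a waiter, there is nothing to pop, which
# is the "No waiting events found ..." / unexpected-event case above.
import threading
from collections import defaultdict

class InstanceEventLatches:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = defaultdict(dict)   # instance_uuid -> {event: Event}

    def prepare(self, instance_uuid, event_name):
        with self._lock:
            latch = threading.Event()
            self._waiters[instance_uuid][event_name] = latch
            return latch

    def pop_and_signal(self, instance_uuid, event_name):
        with self._lock:
            latch = self._waiters.get(instance_uuid, {}).pop(event_name, None)
        if latch is None:
            return False        # no waiter registered yet
        latch.set()
        return True
```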
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1849.191983] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.385s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1849.194022] env[62684]: INFO nova.compute.claims [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1849.222336] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1849.222586] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Cleaning up deleted instances with incomplete migration {{(pid=62684) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 1849.251863] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052649, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.508074} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.252181] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] e2a9ab56-bde3-40b6-a214-19c77a9c6778/e2a9ab56-bde3-40b6-a214-19c77a9c6778.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1849.252404] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1849.252666] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0dd0fc23-39cd-4da4-9502-9a139b5dc976 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.259611] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Waiting for the task: (returnval){ [ 1849.259611] env[62684]: value = "task-2052650" [ 1849.259611] env[62684]: _type = "Task" [ 1849.259611] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.269665] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052650, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.617017] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "refresh_cache-4a15d298-115f-4132-8be0-00e623fa21d8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1849.617241] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquired lock "refresh_cache-4a15d298-115f-4132-8be0-00e623fa21d8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1849.617403] env[62684]: DEBUG nova.network.neutron [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1849.693824] env[62684]: DEBUG nova.compute.utils [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1849.695407] env[62684]: DEBUG nova.compute.manager [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1849.695624] env[62684]: DEBUG nova.network.neutron [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1849.726085] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1849.748815] env[62684]: DEBUG nova.policy [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b4de13d501bc4b8aaa78b8153b766921', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e57b232a7e7647c7a3b2bca3c096feb7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1849.769563] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052650, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089252} completed successfully. 
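Annotation: the "Policy check for network:attach_external_network failed with credentials {...}" line above is the expected outcome for a plain member/reader token during port allocation. A hedged sketch of the kind of oslo.policy call behind a line like that; the rule name matches the log, but the default check string and the target dict are assumptions for illustration.

```python
# Hedged sketch of an oslo.policy enforcement similar to the log line above.
from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'is_admin:True'))

creds = {'roles': ['member', 'reader'], 'is_admin': False,
         'project_id': 'e57b232a7e7647c7a3b2bca3c096feb7'}
target = {'project_id': creds['project_id']}

# do_raise=False returns a boolean instead of raising, which matches the
# "failed with credentials ..." debug line rather than an exception.
allowed = enforcer.enforce('network:attach_external_network',
                           target, creds, do_raise=False)
print(allowed)   # False for a non-admin member/reader token
```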
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.769840] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1849.770673] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26aa7e61-b390-475d-8e8a-d6d17a383796 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.790602] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Reconfiguring VM instance instance-00000023 to attach disk [datastore2] e2a9ab56-bde3-40b6-a214-19c77a9c6778/e2a9ab56-bde3-40b6-a214-19c77a9c6778.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1849.792788] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4cd1892-9f76-4333-aa10-ed4f2c74e682 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.815011] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Waiting for the task: (returnval){ [ 1849.815011] env[62684]: value = "task-2052651" [ 1849.815011] env[62684]: _type = "Task" [ 1849.815011] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.823460] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052651, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.073554] env[62684]: DEBUG nova.network.neutron [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Successfully created port: 8be48385-37eb-4c2e-baf8-404a9aad87de {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1850.154608] env[62684]: DEBUG nova.network.neutron [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1850.199563] env[62684]: DEBUG nova.compute.manager [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1850.328376] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052651, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.330754] env[62684]: DEBUG nova.network.neutron [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Updating instance_info_cache with network_info: [{"id": "1b1935d6-812b-4914-a0c8-8cfc6f58c715", "address": "fa:16:3e:6b:ca:4f", "network": {"id": "bca0ee43-bbb1-483b-9d82-56955369f9b7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1592250106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aef5d7061c834332b9f9c5c75596bf08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bfa7abe-7e46-4d8f-b50a-4d0c4509e4dc", "external-id": "nsx-vlan-transportzone-951", "segmentation_id": 951, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b1935d6-81", "ovs_interfaceid": "1b1935d6-812b-4914-a0c8-8cfc6f58c715", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1850.697012] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-378c1a0d-4ca9-4ff5-becf-77fadca6462d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.705992] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af56f85-8822-4065-b5ba-fefd1c5cd482 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.740946] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-213c5a65-c785-46e3-8db9-0c2286eb7398 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.750337] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b173a6d3-bc1a-47c1-967b-89f71d49eac1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.769766] env[62684]: DEBUG nova.compute.provider_tree [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1850.827346] env[62684]: DEBUG oslo_vmware.api [None 
req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052651, 'name': ReconfigVM_Task, 'duration_secs': 0.988196} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1850.827620] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Reconfigured VM instance instance-00000023 to attach disk [datastore2] e2a9ab56-bde3-40b6-a214-19c77a9c6778/e2a9ab56-bde3-40b6-a214-19c77a9c6778.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1850.828296] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7a77106c-8d27-4fc4-bbc9-9a7dcc7b67ee {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.834177] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Waiting for the task: (returnval){ [ 1850.834177] env[62684]: value = "task-2052652" [ 1850.834177] env[62684]: _type = "Task" [ 1850.834177] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1850.834671] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Releasing lock "refresh_cache-4a15d298-115f-4132-8be0-00e623fa21d8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1850.834992] env[62684]: DEBUG nova.compute.manager [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Instance network_info: |[{"id": "1b1935d6-812b-4914-a0c8-8cfc6f58c715", "address": "fa:16:3e:6b:ca:4f", "network": {"id": "bca0ee43-bbb1-483b-9d82-56955369f9b7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1592250106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aef5d7061c834332b9f9c5c75596bf08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bfa7abe-7e46-4d8f-b50a-4d0c4509e4dc", "external-id": "nsx-vlan-transportzone-951", "segmentation_id": 951, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b1935d6-81", "ovs_interfaceid": "1b1935d6-812b-4914-a0c8-8cfc6f58c715", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1850.838376] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None 
req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6b:ca:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bfa7abe-7e46-4d8f-b50a-4d0c4509e4dc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1b1935d6-812b-4914-a0c8-8cfc6f58c715', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1850.845860] env[62684]: DEBUG oslo.service.loopingcall [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1850.846480] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1850.847215] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bd7eaf91-b6b0-445f-b065-83a57daf9fd8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.868254] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052652, 'name': Rename_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.874923] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1850.874923] env[62684]: value = "task-2052653" [ 1850.874923] env[62684]: _type = "Task" [ 1850.874923] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1850.883467] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052653, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.215750] env[62684]: DEBUG nova.compute.manager [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Start spawning the instance on the hypervisor. 
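Annotation: the "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" line above comes out of oslo.service's looping-call machinery wrapping the blocking CreateVM_Task flow. A generic example of that primitive, showing the mechanism only; it is not the exact wrapper Nova uses here, and the callable and return value are made up.

```python
# Generic oslo.service looping-call example: run a callable periodically
# until it signals completion by raising LoopingCallDone.
from oslo_service import loopingcall

def _poll(state):
    state['ticks'] += 1
    if state['ticks'] >= 3:
        # Stop the loop and make .wait() return this value.
        raise loopingcall.LoopingCallDone(retvalue='created')

state = {'ticks': 0}
timer = loopingcall.FixedIntervalLoopingCall(_poll, state)
result = timer.start(interval=0.1).wait()
print(result)   # 'created'
```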
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1851.222547] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1851.222743] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1851.273697] env[62684]: DEBUG nova.scheduler.client.report [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1851.344977] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052652, 'name': Rename_Task, 'duration_secs': 0.166296} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1851.345301] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1851.345663] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-abf1d1ef-07ce-4ea3-9978-24fbd4d24422 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.353783] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Waiting for the task: (returnval){ [ 1851.353783] env[62684]: value = "task-2052654" [ 1851.353783] env[62684]: _type = "Task" [ 1851.353783] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.363350] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052654, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.384505] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052653, 'name': CreateVM_Task, 'duration_secs': 0.346759} completed successfully. 
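Annotation: the inventory payload the report client keeps comparing against placement is printed verbatim above for provider c23c281e-ec1f-4876-972e-a98655f2084f. The small helper below is only illustrative arithmetic, not the placement API: effective schedulable capacity per resource class is (total minus reserved) times allocation_ratio.

```python
# Inventory values copied from the log above; the helper is illustration only.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def schedulable(inv):
    return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

for rc, inv in inventory.items():
    print(rc, schedulable(inv))
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```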
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1851.384690] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1851.385505] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1851.385686] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1851.386064] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1851.386312] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-622c22ac-b594-4758-a59d-892b7cc6f9a7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.391165] env[62684]: DEBUG oslo_vmware.api [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1851.391165] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5201da76-75bb-7d9f-1c75-486c7430e65a" [ 1851.391165] env[62684]: _type = "Task" [ 1851.391165] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.399383] env[62684]: DEBUG oslo_vmware.api [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5201da76-75bb-7d9f-1c75-486c7430e65a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.616182] env[62684]: DEBUG nova.network.neutron [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Successfully updated port: 8be48385-37eb-4c2e-baf8-404a9aad87de {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1851.734890] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1851.734890] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1851.779836] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.588s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.780425] env[62684]: DEBUG nova.compute.manager [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1851.783548] env[62684]: DEBUG oslo_concurrency.lockutils [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.827s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.783749] env[62684]: DEBUG oslo_concurrency.lockutils [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.786750] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.979s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.786893] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.789243] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.893s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.790702] env[62684]: INFO nova.compute.claims [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1851.819885] env[62684]: INFO nova.scheduler.client.report [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Deleted allocations for instance dfe40a8c-61d6-4c60-afd3-0defb61c4308 [ 1851.823091] env[62684]: INFO nova.scheduler.client.report [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Deleted allocations for instance 8c046991-b294-4f33-9fce-a241984d66d7 [ 1851.865558] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052654, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.902801] env[62684]: DEBUG oslo_vmware.api [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5201da76-75bb-7d9f-1c75-486c7430e65a, 'name': SearchDatastore_Task, 'duration_secs': 0.012528} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1851.903179] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1851.903505] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1851.903769] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1851.903922] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1851.904153] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1851.904450] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17610752-06a2-4108-85e6-92d06a700d1c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.914055] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1851.914267] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Folder [datastore2] devstack-image-cache_base created. 
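Annotation: the "Creating directory with path [datastore2] devstack-image-cache_base" / "Folder ... created" pair above is the image-cache folder being ensured on the datastore via FileManager.MakeDirectory. A hedged sketch of that call through the session; the keyword names follow the vSphere SDK, and handling of an already-existing folder is deliberately omitted.

```python
# Assumed sketch of the image-cache mkdir step through the session.
def ensure_cache_folder(session, dc_ref, ds_path):
    session.invoke_api(session.vim, 'MakeDirectory',
                       session.vim.service_content.fileManager,
                       name=ds_path,
                       datacenter=dc_ref,
                       createParentDirectories=True)

# ensure_cache_folder(session, dc_ref,
#                     '[datastore2] devstack-image-cache_base')
```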
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1851.915274] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cfbca1f-ef49-441c-aa8b-c5e8e3dd1f6c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.921135] env[62684]: DEBUG oslo_vmware.api [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1851.921135] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5219e8de-281f-61a7-1f2f-473c4730d8aa" [ 1851.921135] env[62684]: _type = "Task" [ 1851.921135] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.933141] env[62684]: DEBUG oslo_vmware.api [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5219e8de-281f-61a7-1f2f-473c4730d8aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.118651] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "refresh_cache-b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1852.118851] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquired lock "refresh_cache-b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1852.118949] env[62684]: DEBUG nova.network.neutron [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1852.297598] env[62684]: DEBUG nova.compute.utils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1852.299204] env[62684]: DEBUG nova.compute.manager [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1852.299416] env[62684]: DEBUG nova.network.neutron [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1852.333630] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8284ed08-d9f0-424a-b027-14e8b785b229 tempest-InstanceActionsTestJSON-1623571015 tempest-InstanceActionsTestJSON-1623571015-project-member] Lock "dfe40a8c-61d6-4c60-afd3-0defb61c4308" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.558s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1852.334956] env[62684]: DEBUG oslo_concurrency.lockutils [None req-748e3eeb-8de3-4a8e-baaf-2c3973440532 tempest-ServerGroupTestJSON-974619422 tempest-ServerGroupTestJSON-974619422-project-member] Lock "8c046991-b294-4f33-9fce-a241984d66d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.531s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1852.345907] env[62684]: DEBUG nova.policy [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a592492907b44d1bdf390c83fd54177', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '843e3293347643789e54644c035332dc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1852.374856] env[62684]: DEBUG oslo_vmware.api [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052654, 'name': PowerOnVM_Task, 'duration_secs': 0.642578} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.375171] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1852.375435] env[62684]: DEBUG nova.compute.manager [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1852.376608] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60846943-90f8-4f04-b901-5f506273b47d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.432843] env[62684]: DEBUG oslo_vmware.api [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5219e8de-281f-61a7-1f2f-473c4730d8aa, 'name': SearchDatastore_Task, 'duration_secs': 0.012094} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.433874] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f473b770-1fcd-4e15-907f-e9e2e324e7cb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.440328] env[62684]: DEBUG oslo_vmware.api [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1852.440328] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527f7dd7-f154-3d3d-070a-f66235435abc" [ 1852.440328] env[62684]: _type = "Task" [ 1852.440328] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.450257] env[62684]: DEBUG oslo_vmware.api [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527f7dd7-f154-3d3d-070a-f66235435abc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.647252] env[62684]: DEBUG nova.network.neutron [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Successfully created port: 1333c708-96f6-4c98-bc29-9be57f9be96f {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1852.652042] env[62684]: DEBUG nova.network.neutron [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Instance cache missing network info. 
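Annotation: the "Powered on the VM" / "Checking state" pair above is a power-on task followed by a property read of the VM's runtime power state (the RetrievePropertiesEx invocation that follows it). A hedged sketch of that pairing through an oslo.vmware session; this is not Nova's vm_util, and vm_ref is assumed to be a VirtualMachine managed-object reference.

```python
# Hedged sketch of the power-on plus state-check sequence in the log.
from oslo_vmware import vim_util

def power_on_and_check(session, vm_ref):
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)
    # The "Checking state" line corresponds to a property read like this,
    # which goes out as RetrievePropertiesEx under the hood.
    return session.invoke_api(vim_util, 'get_object_property',
                              session.vim, vm_ref, 'runtime.powerState')
```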
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1852.778723] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "refresh_cache-0676806b-c1f0-4c1a-a12d-add2edf1588f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1852.779085] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "refresh_cache-0676806b-c1f0-4c1a-a12d-add2edf1588f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1852.779085] env[62684]: DEBUG nova.network.neutron [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Forcefully refreshing network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1852.803740] env[62684]: DEBUG nova.compute.manager [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1852.819305] env[62684]: DEBUG nova.network.neutron [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Updating instance_info_cache with network_info: [{"id": "8be48385-37eb-4c2e-baf8-404a9aad87de", "address": "fa:16:3e:33:7c:80", "network": {"id": "64494ea7-f6d9-430c-8ac7-e876e763004b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2056829508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e57b232a7e7647c7a3b2bca3c096feb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8be48385-37", "ovs_interfaceid": "8be48385-37eb-4c2e-baf8-404a9aad87de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1852.898169] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1852.951736] env[62684]: DEBUG oslo_vmware.api [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': 
session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527f7dd7-f154-3d3d-070a-f66235435abc, 'name': SearchDatastore_Task, 'duration_secs': 0.011449} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.952232] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1852.952540] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 4a15d298-115f-4132-8be0-00e623fa21d8/4a15d298-115f-4132-8be0-00e623fa21d8.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1852.952718] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-68c32355-3740-424c-9a31-1f5cfb3e7072 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.964060] env[62684]: DEBUG oslo_vmware.api [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1852.964060] env[62684]: value = "task-2052655" [ 1852.964060] env[62684]: _type = "Task" [ 1852.964060] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.972842] env[62684]: DEBUG oslo_vmware.api [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052655, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.299107] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8300e544-7128-4c44-8b86-c673c566fecb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.311928] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbe4bfb3-6992-4c45-b3db-7cae528dd351 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.324861] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Releasing lock "refresh_cache-b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1853.325880] env[62684]: DEBUG nova.compute.manager [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Instance network_info: |[{"id": "8be48385-37eb-4c2e-baf8-404a9aad87de", "address": "fa:16:3e:33:7c:80", "network": {"id": "64494ea7-f6d9-430c-8ac7-e876e763004b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2056829508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e57b232a7e7647c7a3b2bca3c096feb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8be48385-37", "ovs_interfaceid": "8be48385-37eb-4c2e-baf8-404a9aad87de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1853.361690] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ccc0865-551b-4b52-b448-7dd47659d971 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.373828] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4efd4e8-143d-4792-936e-983925ed36e3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.395751] env[62684]: DEBUG nova.compute.provider_tree [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1853.476209] env[62684]: DEBUG oslo_vmware.api [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052655, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.471319} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.476595] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 4a15d298-115f-4132-8be0-00e623fa21d8/4a15d298-115f-4132-8be0-00e623fa21d8.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1853.476906] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1853.477225] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f2e7cf94-9dec-4b84-a00f-d4fc5908f2ee {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.484450] env[62684]: DEBUG oslo_vmware.api [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1853.484450] env[62684]: value = "task-2052656" [ 1853.484450] env[62684]: _type = "Task" [ 1853.484450] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.493675] env[62684]: DEBUG oslo_vmware.api [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052656, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.612870] env[62684]: DEBUG nova.virt.hardware [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1853.613193] env[62684]: DEBUG nova.virt.hardware [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1853.613384] env[62684]: DEBUG nova.virt.hardware [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1853.613688] env[62684]: DEBUG nova.virt.hardware [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1853.613858] env[62684]: DEBUG nova.virt.hardware [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1853.618039] env[62684]: DEBUG nova.virt.hardware [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1853.618039] env[62684]: DEBUG nova.virt.hardware [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1853.618039] env[62684]: DEBUG nova.virt.hardware [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1853.618039] env[62684]: DEBUG nova.virt.hardware [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1853.618039] env[62684]: DEBUG nova.virt.hardware [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1853.618321] env[62684]: DEBUG nova.virt.hardware [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1853.618321] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9741c82b-db9e-4745-b667-43ab5b26913c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.626340] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fb3fe6b-c36b-4fdf-badb-3e832c3a53b1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.641466] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:7c:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6966f473-59ac-49bb-9b7a-22c61f4e61e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8be48385-37eb-4c2e-baf8-404a9aad87de', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1853.652872] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Creating folder: Project (e57b232a7e7647c7a3b2bca3c096feb7). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1853.655498] env[62684]: DEBUG oslo_vmware.rw_handles [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5254c8fb-7a8e-143a-1aa0-1e96e15ebb94/disk-0.vmdk. 
{{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1853.655784] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b889a7fb-f41e-496c-bb12-4924d900bb3b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.658610] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4811a7c5-ebb1-4029-9dfc-568901177943 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.665980] env[62684]: DEBUG oslo_vmware.rw_handles [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5254c8fb-7a8e-143a-1aa0-1e96e15ebb94/disk-0.vmdk is in state: ready. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1853.666106] env[62684]: ERROR oslo_vmware.rw_handles [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5254c8fb-7a8e-143a-1aa0-1e96e15ebb94/disk-0.vmdk due to incomplete transfer. [ 1853.666322] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f99843a5-253a-4ada-bfec-63ae3bad5a9c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.671636] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Created folder: Project (e57b232a7e7647c7a3b2bca3c096feb7) in parent group-v421118. [ 1853.671858] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Creating folder: Instances. Parent ref: group-v421229. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1853.672124] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d34d32eb-8e46-4ea5-8d07-1f936ad7d7c7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.675386] env[62684]: DEBUG oslo_vmware.rw_handles [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5254c8fb-7a8e-143a-1aa0-1e96e15ebb94/disk-0.vmdk. 
{{(pid=62684) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1853.675627] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Uploaded image b28939e6-697c-4d7e-b45d-fc69ecb176b1 to the Glance image server {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1853.678235] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Destroying the VM {{(pid=62684) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1853.678900] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b5656bb4-e5d5-42c7-9150-96917449396a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.686608] env[62684]: DEBUG oslo_vmware.api [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1853.686608] env[62684]: value = "task-2052659" [ 1853.686608] env[62684]: _type = "Task" [ 1853.686608] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.690643] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Created folder: Instances in parent group-v421229. [ 1853.690942] env[62684]: DEBUG oslo.service.loopingcall [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1853.691577] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1853.691838] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d9fdff00-8166-49ae-b614-a6a5af63fafa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.711928] env[62684]: DEBUG oslo_vmware.api [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052659, 'name': Destroy_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.717498] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1853.717498] env[62684]: value = "task-2052660" [ 1853.717498] env[62684]: _type = "Task" [ 1853.717498] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.727818] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052660, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.824660] env[62684]: DEBUG nova.compute.manager [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1853.848856] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1853.849158] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1853.849366] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1853.849530] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1853.849679] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1853.849855] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1853.850088] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a 
tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1853.850263] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1853.850451] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1853.850622] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1853.850800] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1853.851735] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d8c5d2c-2718-4086-8366-98009e31e5ce {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.860810] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3f13ca3-7dc4-417f-a700-c980d183315f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.921693] env[62684]: ERROR nova.scheduler.client.report [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [req-73ea7c8a-4848-4a38-a6af-5a7d7af4ab01] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-73ea7c8a-4848-4a38-a6af-5a7d7af4ab01"}]} [ 1853.946776] env[62684]: DEBUG nova.scheduler.client.report [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1853.969951] env[62684]: DEBUG nova.scheduler.client.report [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1853.971030] env[62684]: DEBUG nova.compute.provider_tree [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1853.987116] env[62684]: DEBUG nova.scheduler.client.report [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1854.001446] env[62684]: DEBUG oslo_vmware.api [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052656, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.118634} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1854.001814] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1854.003674] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4448ec09-f7bf-4958-befc-0524d67d1190 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.028535] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Reconfiguring VM instance instance-00000024 to attach disk [datastore2] 4a15d298-115f-4132-8be0-00e623fa21d8/4a15d298-115f-4132-8be0-00e623fa21d8.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1854.032202] env[62684]: DEBUG nova.scheduler.client.report [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1854.034394] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c48a89f3-6a7c-4e05-854d-ff4f26c02efe {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.061264] env[62684]: DEBUG oslo_vmware.api [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1854.061264] env[62684]: value = "task-2052661" [ 1854.061264] env[62684]: _type = "Task" [ 1854.061264] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.072289] env[62684]: DEBUG oslo_vmware.api [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052661, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.195803] env[62684]: DEBUG oslo_vmware.api [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052659, 'name': Destroy_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.231412] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052660, 'name': CreateVM_Task, 'duration_secs': 0.366475} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1854.231747] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1854.232328] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1854.233435] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1854.233435] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1854.233435] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87af78f7-3911-4697-b014-0112c2c274bd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.239977] env[62684]: DEBUG oslo_vmware.api [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 1854.239977] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ea5175-25d5-31e2-ec69-d73da5540759" [ 1854.239977] env[62684]: _type = "Task" [ 1854.239977] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.253314] env[62684]: DEBUG oslo_vmware.api [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ea5175-25d5-31e2-ec69-d73da5540759, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.269084] env[62684]: DEBUG nova.network.neutron [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Successfully updated port: 1333c708-96f6-4c98-bc29-9be57f9be96f {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1854.292848] env[62684]: DEBUG nova.network.neutron [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Updating instance_info_cache with network_info: [{"id": "10023d3d-f0cd-49c9-984f-fb3f2af83e3b", "address": "fa:16:3e:2d:b8:1b", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.60", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10023d3d-f0", "ovs_interfaceid": "10023d3d-f0cd-49c9-984f-fb3f2af83e3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1854.361466] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Acquiring lock "8449f09b-4e7b-4511-bb3c-2ff6667addb2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1854.361802] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Lock "8449f09b-4e7b-4511-bb3c-2ff6667addb2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1854.494775] env[62684]: DEBUG nova.compute.manager [req-42533007-d47c-4d01-8100-e446568af1ac req-52943505-fd28-4565-b01c-5ab605267994 service nova] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Received event network-changed-1b1935d6-812b-4914-a0c8-8cfc6f58c715 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1854.495095] env[62684]: DEBUG nova.compute.manager [req-42533007-d47c-4d01-8100-e446568af1ac req-52943505-fd28-4565-b01c-5ab605267994 service nova] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Refreshing instance network info cache due to event network-changed-1b1935d6-812b-4914-a0c8-8cfc6f58c715. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1854.495518] env[62684]: DEBUG oslo_concurrency.lockutils [req-42533007-d47c-4d01-8100-e446568af1ac req-52943505-fd28-4565-b01c-5ab605267994 service nova] Acquiring lock "refresh_cache-4a15d298-115f-4132-8be0-00e623fa21d8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1854.495730] env[62684]: DEBUG oslo_concurrency.lockutils [req-42533007-d47c-4d01-8100-e446568af1ac req-52943505-fd28-4565-b01c-5ab605267994 service nova] Acquired lock "refresh_cache-4a15d298-115f-4132-8be0-00e623fa21d8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1854.495944] env[62684]: DEBUG nova.network.neutron [req-42533007-d47c-4d01-8100-e446568af1ac req-52943505-fd28-4565-b01c-5ab605267994 service nova] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Refreshing network info cache for port 1b1935d6-812b-4914-a0c8-8cfc6f58c715 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1854.567483] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1254983d-cf7e-4507-b977-0c428994dfb4 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] Acquiring lock "d532b5fa-90a3-4f25-8684-4eabaa432c86" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1854.568079] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1254983d-cf7e-4507-b977-0c428994dfb4 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] Lock "d532b5fa-90a3-4f25-8684-4eabaa432c86" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1854.578348] env[62684]: DEBUG oslo_vmware.api [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052661, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.632339] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb380104-db7f-4f4e-8325-d467399f0f67 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.641126] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a61b70-ae7c-4bdb-b47a-ca403885f0a1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.673803] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-253e3636-78bb-49a3-85d3-e99b61e7c851 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.682730] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0649849d-91d5-4d3c-b9ab-e3d4c78aa5e8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.695410] env[62684]: DEBUG oslo_vmware.api [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052659, 'name': Destroy_Task, 'duration_secs': 0.563185} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1854.705718] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Destroyed the VM [ 1854.706089] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Deleting Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1854.706993] env[62684]: DEBUG nova.compute.provider_tree [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1854.710307] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-57b85421-1a9c-4fa6-89e1-e73ba306c0c8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.717975] env[62684]: DEBUG oslo_vmware.api [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1854.717975] env[62684]: value = 
"task-2052662" [ 1854.717975] env[62684]: _type = "Task" [ 1854.717975] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.720013] env[62684]: DEBUG oslo_vmware.rw_handles [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5222ed62-0042-ab04-cbcc-f76946bdbc78/disk-0.vmdk. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1854.720970] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef412622-0c0c-4d03-88cd-be69189ba557 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.731011] env[62684]: DEBUG oslo_vmware.rw_handles [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5222ed62-0042-ab04-cbcc-f76946bdbc78/disk-0.vmdk is in state: ready. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1854.731011] env[62684]: ERROR oslo_vmware.rw_handles [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5222ed62-0042-ab04-cbcc-f76946bdbc78/disk-0.vmdk due to incomplete transfer. [ 1854.733878] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-7d3f5ee0-9a38-4b87-b8c4-e04532a63a2d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.736064] env[62684]: DEBUG oslo_vmware.api [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052662, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.744943] env[62684]: DEBUG oslo_vmware.rw_handles [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5222ed62-0042-ab04-cbcc-f76946bdbc78/disk-0.vmdk. 
{{(pid=62684) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1854.745212] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Uploaded image 08ff9f3e-529a-4533-8ecd-7c43601c17d3 to the Glance image server {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1854.746845] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Destroying the VM {{(pid=62684) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1854.747463] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f16a2763-8a27-4f45-a132-7d42804f2339 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.752355] env[62684]: DEBUG oslo_vmware.api [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ea5175-25d5-31e2-ec69-d73da5540759, 'name': SearchDatastore_Task, 'duration_secs': 0.011011} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1854.752930] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1854.753203] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1854.753455] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1854.753679] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1854.753881] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Creating 
directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1854.754143] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9b3f5c59-ff45-4579-b9dd-6280c295c125 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.757179] env[62684]: DEBUG oslo_vmware.api [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1854.757179] env[62684]: value = "task-2052663" [ 1854.757179] env[62684]: _type = "Task" [ 1854.757179] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.764175] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1854.764386] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1854.765242] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1615afd-ccd1-49e4-882d-fe94237286ee {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.770726] env[62684]: DEBUG oslo_vmware.api [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052663, 'name': Destroy_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.772759] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquiring lock "refresh_cache-cfe219da-adf9-44b9-9df3-752ccf72a68b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1854.772885] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquired lock "refresh_cache-cfe219da-adf9-44b9-9df3-752ccf72a68b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1854.773067] env[62684]: DEBUG nova.network.neutron [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1854.775532] env[62684]: DEBUG oslo_vmware.api [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 1854.775532] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a171fd-aa2e-0cef-37c5-44b8dbeb11eb" [ 1854.775532] env[62684]: _type = "Task" [ 1854.775532] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.784359] env[62684]: DEBUG oslo_vmware.api [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a171fd-aa2e-0cef-37c5-44b8dbeb11eb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.795499] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "refresh_cache-0676806b-c1f0-4c1a-a12d-add2edf1588f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1854.795732] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Updated the network info_cache for instance {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1854.795932] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1854.796204] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1854.796424] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1854.796651] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1854.796866] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1854.797036] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1855.073432] env[62684]: DEBUG oslo_vmware.api [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052661, 'name': ReconfigVM_Task, 'duration_secs': 0.538185} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.074506] env[62684]: DEBUG nova.compute.utils [None req-1254983d-cf7e-4507-b977-0c428994dfb4 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1855.075903] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Reconfigured VM instance instance-00000024 to attach disk [datastore2] 4a15d298-115f-4132-8be0-00e623fa21d8/4a15d298-115f-4132-8be0-00e623fa21d8.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1855.078081] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-38929cdf-c8a0-47d6-869d-90549d171b01 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.084161] env[62684]: DEBUG oslo_vmware.api [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1855.084161] env[62684]: value = "task-2052664" [ 1855.084161] env[62684]: _type = "Task" [ 1855.084161] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.094548] env[62684]: DEBUG oslo_vmware.api [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052664, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.232772] env[62684]: DEBUG oslo_vmware.api [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052662, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.252170] env[62684]: DEBUG nova.scheduler.client.report [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 60 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1855.252170] env[62684]: DEBUG nova.compute.provider_tree [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 60 to 61 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1855.252380] env[62684]: DEBUG nova.compute.provider_tree [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1855.276299] env[62684]: DEBUG oslo_vmware.api [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052663, 'name': Destroy_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.289562] env[62684]: DEBUG oslo_vmware.api [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a171fd-aa2e-0cef-37c5-44b8dbeb11eb, 'name': SearchDatastore_Task, 'duration_secs': 0.008963} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.290381] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65814824-13a0-4099-8ba3-7d369736670f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.296147] env[62684]: DEBUG oslo_vmware.api [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 1855.296147] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521111c0-4608-1c7f-a2a9-cd6ce97039c8" [ 1855.296147] env[62684]: _type = "Task" [ 1855.296147] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.305019] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Getting list of instances from cluster (obj){ [ 1855.305019] env[62684]: value = "domain-c8" [ 1855.305019] env[62684]: _type = "ClusterComputeResource" [ 1855.305019] env[62684]: } {{(pid=62684) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1855.306365] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f1d2e4d-796c-4d08-84e0-a5d44eb3cbe4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.310487] env[62684]: DEBUG nova.network.neutron [req-42533007-d47c-4d01-8100-e446568af1ac req-52943505-fd28-4565-b01c-5ab605267994 service nova] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Updated VIF entry in instance network info cache for port 1b1935d6-812b-4914-a0c8-8cfc6f58c715. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1855.310829] env[62684]: DEBUG nova.network.neutron [req-42533007-d47c-4d01-8100-e446568af1ac req-52943505-fd28-4565-b01c-5ab605267994 service nova] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Updating instance_info_cache with network_info: [{"id": "1b1935d6-812b-4914-a0c8-8cfc6f58c715", "address": "fa:16:3e:6b:ca:4f", "network": {"id": "bca0ee43-bbb1-483b-9d82-56955369f9b7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1592250106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aef5d7061c834332b9f9c5c75596bf08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bfa7abe-7e46-4d8f-b50a-4d0c4509e4dc", "external-id": "nsx-vlan-transportzone-951", "segmentation_id": 951, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b1935d6-81", "ovs_interfaceid": "1b1935d6-812b-4914-a0c8-8cfc6f58c715", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1855.315943] env[62684]: DEBUG oslo_vmware.api [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521111c0-4608-1c7f-a2a9-cd6ce97039c8, 'name': SearchDatastore_Task, 'duration_secs': 0.010104} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.316447] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1855.316733] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] b4cd871a-30ea-4b7a-98ad-00b8676dc2cd/b4cd871a-30ea-4b7a-98ad-00b8676dc2cd.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1855.316996] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-554ed5e5-ab27-4106-8b93-4447aeffd1a9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.336574] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Got total of 17 instances {{(pid=62684) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1855.336775] env[62684]: WARNING nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] While synchronizing instance power states, found 25 instances in the database and 17 instances on the hypervisor. 
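The _sync_power_states entries above and below capture the periodic task listing the 17 VMs present on the vCenter cluster, comparing that against the 25 instances recorded in the database, and then triggering a per-UUID sync. A minimal sketch of that compare-and-dispatch shape in plain Python follows; db_instances, hypervisor_uuids and query_driver_power_state_and_sync are illustrative names for this sketch, not Nova's actual interfaces.

import logging

LOG = logging.getLogger(__name__)


def sync_power_states(db_instances, hypervisor_uuids):
    # Hypothetical sketch: compare DB records against the VMs reported by
    # the hypervisor and kick off a per-instance power-state sync, mirroring
    # the "Triggering sync for uuid ..." entries in the surrounding log.
    num_db = len(db_instances)
    num_vm = len(hypervisor_uuids)
    if num_db != num_vm:
        # Corresponds to the WARNING emitted when the counts diverge.
        LOG.warning("While synchronizing instance power states, found %d "
                    "instances in the database and %d instances on the "
                    "hypervisor.", num_db, num_vm)
    for instance in db_instances:
        LOG.debug("Triggering sync for uuid %s", instance["uuid"])
        query_driver_power_state_and_sync(instance)


def query_driver_power_state_and_sync(instance):
    # Placeholder for the per-instance step; in the log this work runs under
    # a per-UUID lock (see the lockutils entries that follow).
    pass
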
[ 1855.336918] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid 0676806b-c1f0-4c1a-a12d-add2edf1588f {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1855.337125] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid effc673a-103f-413b-88ac-6907ad1ee852 {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1855.337286] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid fb7f38a0-bcfa-4d96-bde3-20d6f1d70112 {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1855.337435] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid 6b1f0e69-3915-40dc-b4ec-93ab174f12b6 {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1855.337579] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid d532b5fa-90a3-4f25-8684-4eabaa432c86 {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1855.337725] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid 73f27fc0-ebae-41c7-b292-14396f79a5a2 {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1855.337879] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid dcb0a5b2-379e-44ff-a9b0-be615943c94e {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1855.338030] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1855.338177] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid 06751c34-0724-44ba-a263-ad27fcf2920f {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1855.338319] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid 0dbd52ac-c987-4728-974e-73e99465c5e7 {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1855.338493] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid ab2c7cbe-6f46-4174-bffb-055a15f2d56b {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1855.338635] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid a9dfeb4d-a92e-41cf-9d2f-43086cc9e868 {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1855.338775] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978 {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1855.338914] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid f44b2e88-af6d-4252-b562-9d5fa7745b56 {{(pid=62684) _sync_power_states 
/opt/stack/nova/nova/compute/manager.py:10339}} [ 1855.339065] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid 3a172e9f-9f79-489e-9571-80bd74ad8609 {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1855.339209] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid 6d4061e4-a074-445d-95c5-239014ee87f3 {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1855.339348] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid 52839b18-a68a-4ec7-a921-c42454955e82 {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1855.339486] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid e08f8636-5193-40fa-972c-f0ecab193fc1 {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1855.339624] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid b788c51b-367b-4eef-93d2-faa8836469b6 {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1855.339762] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid d06f3099-d05f-417f-a71a-7b368590624f {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1855.339903] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid e2a9ab56-bde3-40b6-a214-19c77a9c6778 {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1855.340060] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid 4a15d298-115f-4132-8be0-00e623fa21d8 {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1855.340196] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid b4cd871a-30ea-4b7a-98ad-00b8676dc2cd {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1855.340353] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid cfe219da-adf9-44b9-9df3-752ccf72a68b {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1855.340556] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid c6dc5401-f59e-4c18-9553-1240e2f49bce {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1855.342541] env[62684]: DEBUG nova.network.neutron [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1855.344505] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "0676806b-c1f0-4c1a-a12d-add2edf1588f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.344729] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "0676806b-c1f0-4c1a-a12d-add2edf1588f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.345027] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "effc673a-103f-413b-88ac-6907ad1ee852" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.345217] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "effc673a-103f-413b-88ac-6907ad1ee852" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.345500] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "fb7f38a0-bcfa-4d96-bde3-20d6f1d70112" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.345698] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "6b1f0e69-3915-40dc-b4ec-93ab174f12b6" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.345870] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "6b1f0e69-3915-40dc-b4ec-93ab174f12b6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.346124] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "d532b5fa-90a3-4f25-8684-4eabaa432c86" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.346333] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "73f27fc0-ebae-41c7-b292-14396f79a5a2" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} 
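The lockutils entries in this stretch show each candidate instance being synced under a lock named after its UUID, with the sync skipped when the instance still has a pending task (image_uploading in the two INFO lines) and the waited/held durations reported on release. A rough sketch of that pattern using oslo_concurrency.lockutils; the instance dict layout and the task_state check are illustrative assumptions, not Nova's exact code.

import logging

from oslo_concurrency import lockutils

LOG = logging.getLogger(__name__)


def query_driver_power_state_and_sync(instance, driver_power_state):
    # Illustrative sketch: serialize per-instance work on a UUID-named lock,
    # as in the "Acquiring lock" / "Lock ... acquired" entries above.
    with lockutils.lock(instance["uuid"]):
        if instance.get("task_state") is not None:
            # Matches the "During sync_power_state the instance has a
            # pending task (...). Skip." INFO entries.
            LOG.info("Pending task (%s), skipping power-state sync for %s",
                     instance["task_state"], instance["uuid"])
            return
        # Otherwise reconcile the stored power state with what the driver
        # reports; the reconciliation itself is outside this sketch.
        LOG.debug("Syncing %s: driver reports power state %s",
                  instance["uuid"], driver_power_state)
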
[ 1855.346511] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "73f27fc0-ebae-41c7-b292-14396f79a5a2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.346762] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "dcb0a5b2-379e-44ff-a9b0-be615943c94e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.346940] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "dcb0a5b2-379e-44ff-a9b0-be615943c94e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.347189] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "5bc73032-45f9-4b5c-a4ea-e07c48e4f82b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.347366] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "5bc73032-45f9-4b5c-a4ea-e07c48e4f82b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.348177] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "06751c34-0724-44ba-a263-ad27fcf2920f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.348386] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "06751c34-0724-44ba-a263-ad27fcf2920f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.348645] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "0dbd52ac-c987-4728-974e-73e99465c5e7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.348848] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "0dbd52ac-c987-4728-974e-73e99465c5e7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.349099] env[62684]: DEBUG 
oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "ab2c7cbe-6f46-4174-bffb-055a15f2d56b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.349282] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "ab2c7cbe-6f46-4174-bffb-055a15f2d56b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.349449] env[62684]: INFO nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] During sync_power_state the instance has a pending task (image_uploading). Skip. [ 1855.349614] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "ab2c7cbe-6f46-4174-bffb-055a15f2d56b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.349806] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "a9dfeb4d-a92e-41cf-9d2f-43086cc9e868" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.349977] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "a9dfeb4d-a92e-41cf-9d2f-43086cc9e868" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.350221] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.350397] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.350619] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "f44b2e88-af6d-4252-b562-9d5fa7745b56" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.350794] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "f44b2e88-af6d-4252-b562-9d5fa7745b56" acquired by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.351041] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "3a172e9f-9f79-489e-9571-80bd74ad8609" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.351252] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "6d4061e4-a074-445d-95c5-239014ee87f3" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.351450] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "52839b18-a68a-4ec7-a921-c42454955e82" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.351647] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "e08f8636-5193-40fa-972c-f0ecab193fc1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.351818] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "e08f8636-5193-40fa-972c-f0ecab193fc1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.351979] env[62684]: INFO nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] During sync_power_state the instance has a pending task (image_uploading). Skip. 
[ 1855.352150] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "e08f8636-5193-40fa-972c-f0ecab193fc1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.352334] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "b788c51b-367b-4eef-93d2-faa8836469b6" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.352536] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "d06f3099-d05f-417f-a71a-7b368590624f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.352736] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "e2a9ab56-bde3-40b6-a214-19c77a9c6778" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.352910] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "e2a9ab56-bde3-40b6-a214-19c77a9c6778" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.353146] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "4a15d298-115f-4132-8be0-00e623fa21d8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.353446] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.353640] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "cfe219da-adf9-44b9-9df3-752ccf72a68b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.353843] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "c6dc5401-f59e-4c18-9553-1240e2f49bce" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.354040] env[62684]: DEBUG oslo_service.periodic_task [None 
req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1855.354187] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1855.354459] env[62684]: DEBUG oslo_vmware.api [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 1855.354459] env[62684]: value = "task-2052665" [ 1855.354459] env[62684]: _type = "Task" [ 1855.354459] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.355226] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a5aa11-5eaa-4376-88eb-976e374f5adb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.358321] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe219d90-f86a-427f-9a0c-b27ed67c8807 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.361602] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a39a73b-f184-4c99-926f-5cfe9cb5a03a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.364768] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b1c286-ab17-4b62-8114-a4d4afaf1a48 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.367777] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d45f164c-e82e-4210-851b-1969d14ed438 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.371408] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37533564-9d6a-498d-a5cb-31337dd3034d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.374758] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd976f9b-2c7a-4073-a995-932e0b9aaf3a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.377872] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f0447d-b990-4892-894b-360596b11fc5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.381084] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5031306c-3a44-4dda-8765-fb480fbb5e74 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.385849] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-793be813-59ed-438f-9929-45e84a932782 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.389034] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b4fa97-b7f9-4ee6-89d4-e0b6581926f0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.392700] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7159b7f3-ea55-4380-9543-7bd7e7a83b97 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.395230] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1855.407582] env[62684]: INFO nova.compute.manager [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Rebuilding instance [ 1855.444897] env[62684]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1855.453554] env[62684]: WARNING oslo_messaging._drivers.amqpdriver [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. Increasing threshold to: 20 [ 1855.462831] env[62684]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1855.467572] env[62684]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1855.471397] env[62684]: DEBUG oslo_vmware.api [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2052665, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.508523] env[62684]: DEBUG nova.compute.manager [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1855.508960] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46090e2a-dadf-40b7-83e3-9a23b9aee79f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.577591] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1254983d-cf7e-4507-b977-0c428994dfb4 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] Lock "d532b5fa-90a3-4f25-8684-4eabaa432c86" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.581232] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "d532b5fa-90a3-4f25-8684-4eabaa432c86" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.235s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.582828] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a035510-2ed5-4463-b6a6-012410d8bc21 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.598040] env[62684]: DEBUG oslo_vmware.api [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052664, 'name': Rename_Task, 'duration_secs': 0.331616} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.599139] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1855.600823] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c5bab84a-ffd2-4609-898f-36142e68a66b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.609816] env[62684]: DEBUG oslo_vmware.api [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1855.609816] env[62684]: value = "task-2052666" [ 1855.609816] env[62684]: _type = "Task" [ 1855.609816] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.623985] env[62684]: DEBUG oslo_vmware.api [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052666, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.644093] env[62684]: DEBUG nova.network.neutron [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Updating instance_info_cache with network_info: [{"id": "1333c708-96f6-4c98-bc29-9be57f9be96f", "address": "fa:16:3e:45:eb:99", "network": {"id": "67d45e5a-c931-48e0-8be6-ad19f860ff6f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1665326946-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "843e3293347643789e54644c035332dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1333c708-96", "ovs_interfaceid": "1333c708-96f6-4c98-bc29-9be57f9be96f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1855.732250] env[62684]: DEBUG oslo_vmware.api [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052662, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.762025] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.971s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.762025] env[62684]: DEBUG nova.compute.manager [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1855.764022] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.309s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.765602] env[62684]: INFO nova.compute.claims [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1855.779594] env[62684]: DEBUG oslo_vmware.api [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052663, 'name': Destroy_Task, 'duration_secs': 0.631832} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.779952] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Destroyed the VM [ 1855.782477] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Deleting Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1855.782477] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-81d4d07f-0aa4-48b6-9f19-2e6eac6a381a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.790331] env[62684]: DEBUG oslo_vmware.api [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1855.790331] env[62684]: value = "task-2052667" [ 1855.790331] env[62684]: _type = "Task" [ 1855.790331] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.803242] env[62684]: DEBUG oslo_vmware.api [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052667, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.817315] env[62684]: DEBUG oslo_concurrency.lockutils [req-42533007-d47c-4d01-8100-e446568af1ac req-52943505-fd28-4565-b01c-5ab605267994 service nova] Releasing lock "refresh_cache-4a15d298-115f-4132-8be0-00e623fa21d8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1855.901765] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.907949] env[62684]: DEBUG oslo_vmware.api [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2052665, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496034} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.908275] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] b4cd871a-30ea-4b7a-98ad-00b8676dc2cd/b4cd871a-30ea-4b7a-98ad-00b8676dc2cd.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1855.908521] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1855.908803] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c269fdf7-048c-432b-bf7b-5d686c42e643 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.916306] env[62684]: DEBUG oslo_vmware.api [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 1855.916306] env[62684]: value = "task-2052668" [ 1855.916306] env[62684]: _type = "Task" [ 1855.916306] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.924436] env[62684]: DEBUG oslo_vmware.api [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2052668, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.960522] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "0676806b-c1f0-4c1a-a12d-add2edf1588f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.616s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.962415] env[62684]: INFO nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] During sync_power_state the instance has a pending task (rebuilding). Skip. [ 1855.962569] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "e2a9ab56-bde3-40b6-a214-19c77a9c6778" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.609s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.965066] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "f44b2e88-af6d-4252-b562-9d5fa7745b56" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.614s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.972717] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "5bc73032-45f9-4b5c-a4ea-e07c48e4f82b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.625s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.993631] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "73f27fc0-ebae-41c7-b292-14396f79a5a2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.647s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.993928] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "dcb0a5b2-379e-44ff-a9b0-be615943c94e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.647s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.994300] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "06751c34-0724-44ba-a263-ad27fcf2920f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.646s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.994634] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "effc673a-103f-413b-88ac-6907ad1ee852" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.649s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.994953] env[62684]: DEBUG 
oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "0dbd52ac-c987-4728-974e-73e99465c5e7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.646s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.995299] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.645s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.995641] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "a9dfeb4d-a92e-41cf-9d2f-43086cc9e868" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.646s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.996142] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "6b1f0e69-3915-40dc-b4ec-93ab174f12b6" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.650s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1856.023927] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1856.024363] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4c9327c4-47f2-4e52-8eda-b0392383cf2f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.031801] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Waiting for the task: (returnval){ [ 1856.031801] env[62684]: value = "task-2052669" [ 1856.031801] env[62684]: _type = "Task" [ 1856.031801] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.042453] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Task: {'id': task-2052669, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.103546] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "d532b5fa-90a3-4f25-8684-4eabaa432c86" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.522s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1856.120452] env[62684]: DEBUG oslo_vmware.api [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052666, 'name': PowerOnVM_Task} progress is 92%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.148611] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Releasing lock "refresh_cache-cfe219da-adf9-44b9-9df3-752ccf72a68b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1856.148737] env[62684]: DEBUG nova.compute.manager [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Instance network_info: |[{"id": "1333c708-96f6-4c98-bc29-9be57f9be96f", "address": "fa:16:3e:45:eb:99", "network": {"id": "67d45e5a-c931-48e0-8be6-ad19f860ff6f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1665326946-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "843e3293347643789e54644c035332dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1333c708-96", "ovs_interfaceid": "1333c708-96f6-4c98-bc29-9be57f9be96f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1856.149587] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:eb:99', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92cdccfd-4b10-4024-b724-5f22792dd4de', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1333c708-96f6-4c98-bc29-9be57f9be96f', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1856.163956] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Creating folder: Project (843e3293347643789e54644c035332dc). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1856.165156] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-522d3e9d-9309-49b2-8afe-181ee603f843 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.180045] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Created folder: Project (843e3293347643789e54644c035332dc) in parent group-v421118. [ 1856.180165] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Creating folder: Instances. Parent ref: group-v421232. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1856.180409] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dbdc38ea-8954-4f48-bfe7-4400eb96f90c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.191809] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Created folder: Instances in parent group-v421232. [ 1856.191809] env[62684]: DEBUG oslo.service.loopingcall [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1856.192146] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1856.192379] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7461fd5a-2886-427b-9c08-77a1c6f95830 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.218121] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1856.218121] env[62684]: value = "task-2052672" [ 1856.218121] env[62684]: _type = "Task" [ 1856.218121] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.230844] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052672, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.236594] env[62684]: DEBUG oslo_vmware.api [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052662, 'name': RemoveSnapshot_Task, 'duration_secs': 1.082689} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.237022] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Deleted Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1856.237365] env[62684]: INFO nova.compute.manager [None req-1a3476c8-4c1a-4381-a14b-b0d236316b0b tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Took 16.16 seconds to snapshot the instance on the hypervisor. [ 1856.275806] env[62684]: DEBUG nova.compute.utils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1856.282974] env[62684]: DEBUG nova.compute.manager [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1856.282974] env[62684]: DEBUG nova.network.neutron [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1856.305411] env[62684]: DEBUG oslo_vmware.api [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052667, 'name': RemoveSnapshot_Task, 'duration_secs': 0.440597} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.305816] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Deleted Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1856.306090] env[62684]: INFO nova.compute.manager [None req-a8f0d5cc-84a4-48d7-a608-237ad2347e14 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Took 13.07 seconds to snapshot the instance on the hypervisor. 
[ 1856.344949] env[62684]: DEBUG nova.policy [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a592492907b44d1bdf390c83fd54177', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '843e3293347643789e54644c035332dc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1856.426748] env[62684]: DEBUG oslo_vmware.api [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2052668, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068537} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.427113] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1856.427994] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda88627-8416-412d-8074-162ed2df5d75 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.454153] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] b4cd871a-30ea-4b7a-98ad-00b8676dc2cd/b4cd871a-30ea-4b7a-98ad-00b8676dc2cd.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1856.454869] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d43b4c60-3b10-42f4-8849-170ed649f5d3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.477135] env[62684]: DEBUG oslo_vmware.api [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 1856.477135] env[62684]: value = "task-2052673" [ 1856.477135] env[62684]: _type = "Task" [ 1856.477135] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.487486] env[62684]: DEBUG oslo_vmware.api [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2052673, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.548243] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Task: {'id': task-2052669, 'name': PowerOffVM_Task, 'duration_secs': 0.105357} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.548430] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1856.548593] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1856.549682] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-477a3f95-a462-4045-8d2b-a40601e4b467 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.556761] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1856.557024] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4c223a25-39ea-4fe0-b0ab-15975fc99cf0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.580846] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1856.581095] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1856.581354] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Deleting the datastore file [datastore2] e2a9ab56-bde3-40b6-a214-19c77a9c6778 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1856.581661] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3e234e37-d88f-4bc4-a556-14325d82cfa2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.588468] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b 
tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Waiting for the task: (returnval){ [ 1856.588468] env[62684]: value = "task-2052675" [ 1856.588468] env[62684]: _type = "Task" [ 1856.588468] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.594395] env[62684]: DEBUG nova.compute.manager [req-e8973080-38ed-4a1b-b4d1-3665591ad7b5 req-81720412-ec6a-42ce-8c82-df64e90ba2da service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Received event network-vif-plugged-8be48385-37eb-4c2e-baf8-404a9aad87de {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1856.594478] env[62684]: DEBUG oslo_concurrency.lockutils [req-e8973080-38ed-4a1b-b4d1-3665591ad7b5 req-81720412-ec6a-42ce-8c82-df64e90ba2da service nova] Acquiring lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1856.594729] env[62684]: DEBUG oslo_concurrency.lockutils [req-e8973080-38ed-4a1b-b4d1-3665591ad7b5 req-81720412-ec6a-42ce-8c82-df64e90ba2da service nova] Lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1856.594957] env[62684]: DEBUG oslo_concurrency.lockutils [req-e8973080-38ed-4a1b-b4d1-3665591ad7b5 req-81720412-ec6a-42ce-8c82-df64e90ba2da service nova] Lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1856.595183] env[62684]: DEBUG nova.compute.manager [req-e8973080-38ed-4a1b-b4d1-3665591ad7b5 req-81720412-ec6a-42ce-8c82-df64e90ba2da service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] No waiting events found dispatching network-vif-plugged-8be48385-37eb-4c2e-baf8-404a9aad87de {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1856.595354] env[62684]: WARNING nova.compute.manager [req-e8973080-38ed-4a1b-b4d1-3665591ad7b5 req-81720412-ec6a-42ce-8c82-df64e90ba2da service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Received unexpected event network-vif-plugged-8be48385-37eb-4c2e-baf8-404a9aad87de for instance with vm_state building and task_state spawning. [ 1856.595602] env[62684]: DEBUG nova.compute.manager [req-e8973080-38ed-4a1b-b4d1-3665591ad7b5 req-81720412-ec6a-42ce-8c82-df64e90ba2da service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Received event network-changed-8be48385-37eb-4c2e-baf8-404a9aad87de {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1856.595813] env[62684]: DEBUG nova.compute.manager [req-e8973080-38ed-4a1b-b4d1-3665591ad7b5 req-81720412-ec6a-42ce-8c82-df64e90ba2da service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Refreshing instance network info cache due to event network-changed-8be48385-37eb-4c2e-baf8-404a9aad87de. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1856.596029] env[62684]: DEBUG oslo_concurrency.lockutils [req-e8973080-38ed-4a1b-b4d1-3665591ad7b5 req-81720412-ec6a-42ce-8c82-df64e90ba2da service nova] Acquiring lock "refresh_cache-b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1856.596481] env[62684]: DEBUG oslo_concurrency.lockutils [req-e8973080-38ed-4a1b-b4d1-3665591ad7b5 req-81720412-ec6a-42ce-8c82-df64e90ba2da service nova] Acquired lock "refresh_cache-b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1856.596481] env[62684]: DEBUG nova.network.neutron [req-e8973080-38ed-4a1b-b4d1-3665591ad7b5 req-81720412-ec6a-42ce-8c82-df64e90ba2da service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Refreshing network info cache for port 8be48385-37eb-4c2e-baf8-404a9aad87de {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1856.604360] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Task: {'id': task-2052675, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.621736] env[62684]: DEBUG oslo_vmware.api [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052666, 'name': PowerOnVM_Task, 'duration_secs': 0.567604} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.621736] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1856.621736] env[62684]: INFO nova.compute.manager [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Took 8.57 seconds to spawn the instance on the hypervisor. 
[ 1856.621968] env[62684]: DEBUG nova.compute.manager [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1856.622649] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9992d64-3dbe-4e90-af1f-4400b943f691 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.638543] env[62684]: DEBUG nova.network.neutron [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Successfully created port: be474f46-e2b0-4e78-af9e-c06c7e91756e {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1856.651509] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1254983d-cf7e-4507-b977-0c428994dfb4 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] Acquiring lock "d532b5fa-90a3-4f25-8684-4eabaa432c86" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1856.651784] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1254983d-cf7e-4507-b977-0c428994dfb4 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] Lock "d532b5fa-90a3-4f25-8684-4eabaa432c86" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1856.652035] env[62684]: INFO nova.compute.manager [None req-1254983d-cf7e-4507-b977-0c428994dfb4 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Attaching volume 42560db2-1a6f-4d32-878e-bb36627ec4d0 to /dev/sdb [ 1856.696019] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80cd4986-ac0f-40e1-818b-a3cefe73336a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.700979] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b595555b-5ab3-4a4c-9266-13e53d3616b4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.715338] env[62684]: DEBUG nova.virt.block_device [None req-1254983d-cf7e-4507-b977-0c428994dfb4 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Updating existing volume attachment record: f5db41b9-c790-4fd8-9761-1059b2d4ec76 {{(pid=62684) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1856.728493] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052672, 'name': CreateVM_Task, 'duration_secs': 0.354317} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.729467] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1856.729467] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1856.729588] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1856.729836] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1856.730292] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83847a16-e814-496f-bfab-4e45a928473f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.735610] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1856.735610] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52063630-601a-2898-2d35-4d8bfbc09974" [ 1856.735610] env[62684]: _type = "Task" [ 1856.735610] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.745403] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52063630-601a-2898-2d35-4d8bfbc09974, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.783887] env[62684]: DEBUG nova.compute.manager [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1856.990407] env[62684]: DEBUG oslo_vmware.api [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2052673, 'name': ReconfigVM_Task, 'duration_secs': 0.446397} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.990711] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Reconfigured VM instance instance-00000025 to attach disk [datastore1] b4cd871a-30ea-4b7a-98ad-00b8676dc2cd/b4cd871a-30ea-4b7a-98ad-00b8676dc2cd.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1856.991754] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-da2bc6a7-4ca4-4401-98f2-5cfe13305314 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.001196] env[62684]: DEBUG oslo_vmware.api [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 1857.001196] env[62684]: value = "task-2052679" [ 1857.001196] env[62684]: _type = "Task" [ 1857.001196] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.010374] env[62684]: DEBUG oslo_vmware.api [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2052679, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.097917] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Task: {'id': task-2052675, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.235127} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.098573] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1857.098573] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1857.098573] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1857.112704] env[62684]: DEBUG oslo_concurrency.lockutils [None req-091a64e3-3926-4ff2-8ba5-0483d14d969c tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "e08f8636-5193-40fa-972c-f0ecab193fc1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1857.112978] env[62684]: DEBUG oslo_concurrency.lockutils [None req-091a64e3-3926-4ff2-8ba5-0483d14d969c tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "e08f8636-5193-40fa-972c-f0ecab193fc1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1857.113223] env[62684]: DEBUG oslo_concurrency.lockutils [None req-091a64e3-3926-4ff2-8ba5-0483d14d969c tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "e08f8636-5193-40fa-972c-f0ecab193fc1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1857.113431] env[62684]: DEBUG oslo_concurrency.lockutils [None req-091a64e3-3926-4ff2-8ba5-0483d14d969c tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "e08f8636-5193-40fa-972c-f0ecab193fc1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1857.113613] env[62684]: DEBUG oslo_concurrency.lockutils [None req-091a64e3-3926-4ff2-8ba5-0483d14d969c tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "e08f8636-5193-40fa-972c-f0ecab193fc1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1857.115816] env[62684]: INFO nova.compute.manager [None req-091a64e3-3926-4ff2-8ba5-0483d14d969c tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 
e08f8636-5193-40fa-972c-f0ecab193fc1] Terminating instance [ 1857.117695] env[62684]: DEBUG nova.compute.manager [None req-091a64e3-3926-4ff2-8ba5-0483d14d969c tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1857.117923] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-091a64e3-3926-4ff2-8ba5-0483d14d969c tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1857.118772] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a7c01d6-eae4-4278-8a5b-befda97a908d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.127629] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-091a64e3-3926-4ff2-8ba5-0483d14d969c tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1857.130643] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-38350d64-b7ad-4eb7-9507-9ab16955befb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.143418] env[62684]: INFO nova.compute.manager [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Took 44.92 seconds to build instance. 
[ 1857.218181] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-091a64e3-3926-4ff2-8ba5-0483d14d969c tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1857.218673] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-091a64e3-3926-4ff2-8ba5-0483d14d969c tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1857.218909] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-091a64e3-3926-4ff2-8ba5-0483d14d969c tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Deleting the datastore file [datastore1] e08f8636-5193-40fa-972c-f0ecab193fc1 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1857.223363] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8ae0899f-42be-422d-8363-eca173e99713 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.229636] env[62684]: DEBUG oslo_vmware.api [None req-091a64e3-3926-4ff2-8ba5-0483d14d969c tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1857.229636] env[62684]: value = "task-2052681" [ 1857.229636] env[62684]: _type = "Task" [ 1857.229636] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.238623] env[62684]: DEBUG oslo_vmware.api [None req-091a64e3-3926-4ff2-8ba5-0483d14d969c tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052681, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.248270] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52063630-601a-2898-2d35-4d8bfbc09974, 'name': SearchDatastore_Task, 'duration_secs': 0.034634} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.250895] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1857.251145] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1857.251379] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1857.251526] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1857.251699] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1857.254125] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3640a1d7-f541-4550-b5e6-d2677e9a2f76 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.265809] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1857.266008] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1857.268871] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0ec5891-8a3b-43eb-ad45-fcb9c1de0599 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.273709] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1857.273709] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526028bc-834e-690a-0cae-02b32bc03445" [ 1857.273709] env[62684]: _type = "Task" [ 1857.273709] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.283722] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526028bc-834e-690a-0cae-02b32bc03445, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.372995] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da53f2fd-5653-4fcd-8b14-145272bfef34 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.386281] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c64d2972-2d71-4c7b-888b-97510494e4c0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.419269] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e30fe6-29f7-48b5-a3ab-20872ac645c6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.428442] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba7215e1-be22-4855-8062-678eced85e57 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.441819] env[62684]: DEBUG nova.compute.provider_tree [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1857.496422] env[62684]: DEBUG nova.network.neutron [req-e8973080-38ed-4a1b-b4d1-3665591ad7b5 req-81720412-ec6a-42ce-8c82-df64e90ba2da service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Updated VIF entry in instance network info cache for port 8be48385-37eb-4c2e-baf8-404a9aad87de. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1857.496621] env[62684]: DEBUG nova.network.neutron [req-e8973080-38ed-4a1b-b4d1-3665591ad7b5 req-81720412-ec6a-42ce-8c82-df64e90ba2da service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Updating instance_info_cache with network_info: [{"id": "8be48385-37eb-4c2e-baf8-404a9aad87de", "address": "fa:16:3e:33:7c:80", "network": {"id": "64494ea7-f6d9-430c-8ac7-e876e763004b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2056829508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e57b232a7e7647c7a3b2bca3c096feb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8be48385-37", "ovs_interfaceid": "8be48385-37eb-4c2e-baf8-404a9aad87de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1857.511602] env[62684]: DEBUG oslo_vmware.api [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2052679, 'name': Rename_Task, 'duration_secs': 0.220826} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.513162] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1857.513162] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4259ff71-744b-4dbf-95d7-f148e99e342f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.521152] env[62684]: DEBUG oslo_vmware.api [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 1857.521152] env[62684]: value = "task-2052682" [ 1857.521152] env[62684]: _type = "Task" [ 1857.521152] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.529259] env[62684]: DEBUG oslo_vmware.api [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2052682, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.649340] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6012bc4d-c397-4403-b596-5342282a49ae tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "4a15d298-115f-4132-8be0-00e623fa21d8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.790s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1857.650866] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "4a15d298-115f-4132-8be0-00e623fa21d8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 2.298s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1857.651738] env[62684]: INFO nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] During sync_power_state the instance has a pending task (spawning). Skip. [ 1857.651738] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "4a15d298-115f-4132-8be0-00e623fa21d8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1857.742539] env[62684]: DEBUG oslo_vmware.api [None req-091a64e3-3926-4ff2-8ba5-0483d14d969c tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052681, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.2266} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.742811] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-091a64e3-3926-4ff2-8ba5-0483d14d969c tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1857.742999] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-091a64e3-3926-4ff2-8ba5-0483d14d969c tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1857.743213] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-091a64e3-3926-4ff2-8ba5-0483d14d969c tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1857.743495] env[62684]: INFO nova.compute.manager [None req-091a64e3-3926-4ff2-8ba5-0483d14d969c tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Took 0.63 seconds to destroy the instance on the hypervisor. 
[ 1857.743782] env[62684]: DEBUG oslo.service.loopingcall [None req-091a64e3-3926-4ff2-8ba5-0483d14d969c tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1857.743990] env[62684]: DEBUG nova.compute.manager [-] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1857.744100] env[62684]: DEBUG nova.network.neutron [-] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1857.789856] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526028bc-834e-690a-0cae-02b32bc03445, 'name': SearchDatastore_Task, 'duration_secs': 0.036611} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.790245] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b5d5ea6-db33-469c-af7b-3593244edd11 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.797199] env[62684]: DEBUG nova.compute.manager [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1857.799712] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1857.799712] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526cfead-3b8f-ef48-8816-978d44014ba2" [ 1857.799712] env[62684]: _type = "Task" [ 1857.799712] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.811296] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526cfead-3b8f-ef48-8816-978d44014ba2, 'name': SearchDatastore_Task, 'duration_secs': 0.00991} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.813423] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1857.813423] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] cfe219da-adf9-44b9-9df3-752ccf72a68b/cfe219da-adf9-44b9-9df3-752ccf72a68b.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1857.813423] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ad194168-3ba3-4d22-bbff-acd0b6577c11 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.820651] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1857.820651] env[62684]: value = "task-2052683" [ 1857.820651] env[62684]: _type = "Task" [ 1857.820651] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.827566] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1857.827825] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1857.827988] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 1857.828253] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1857.828543] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1857.830250] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1857.830250] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1857.830250] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1857.830250] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1857.830250] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1857.830493] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1857.831137] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0503f35-dae9-4f6e-bb33-383b3d48ea58 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.837914] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052683, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.844039] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-700e3518-8360-4032-b6fa-d97b73588b41 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.973189] env[62684]: ERROR nova.scheduler.client.report [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [req-ddd98808-c20a-4d50-8ee6-12ab797cc13f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ddd98808-c20a-4d50-8ee6-12ab797cc13f"}]} [ 1857.993122] env[62684]: DEBUG nova.scheduler.client.report [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1858.000124] env[62684]: DEBUG oslo_concurrency.lockutils [req-e8973080-38ed-4a1b-b4d1-3665591ad7b5 req-81720412-ec6a-42ce-8c82-df64e90ba2da service nova] Releasing lock "refresh_cache-b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1858.000124] env[62684]: DEBUG nova.compute.manager [req-e8973080-38ed-4a1b-b4d1-3665591ad7b5 req-81720412-ec6a-42ce-8c82-df64e90ba2da service nova] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Received event network-vif-plugged-1333c708-96f6-4c98-bc29-9be57f9be96f {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1858.000124] env[62684]: DEBUG oslo_concurrency.lockutils [req-e8973080-38ed-4a1b-b4d1-3665591ad7b5 req-81720412-ec6a-42ce-8c82-df64e90ba2da service nova] Acquiring lock "cfe219da-adf9-44b9-9df3-752ccf72a68b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.000124] env[62684]: DEBUG oslo_concurrency.lockutils [req-e8973080-38ed-4a1b-b4d1-3665591ad7b5 req-81720412-ec6a-42ce-8c82-df64e90ba2da service nova] Lock "cfe219da-adf9-44b9-9df3-752ccf72a68b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.000327] env[62684]: DEBUG oslo_concurrency.lockutils [req-e8973080-38ed-4a1b-b4d1-3665591ad7b5 req-81720412-ec6a-42ce-8c82-df64e90ba2da service nova] Lock "cfe219da-adf9-44b9-9df3-752ccf72a68b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s 
{{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1858.000469] env[62684]: DEBUG nova.compute.manager [req-e8973080-38ed-4a1b-b4d1-3665591ad7b5 req-81720412-ec6a-42ce-8c82-df64e90ba2da service nova] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] No waiting events found dispatching network-vif-plugged-1333c708-96f6-4c98-bc29-9be57f9be96f {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1858.000584] env[62684]: WARNING nova.compute.manager [req-e8973080-38ed-4a1b-b4d1-3665591ad7b5 req-81720412-ec6a-42ce-8c82-df64e90ba2da service nova] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Received unexpected event network-vif-plugged-1333c708-96f6-4c98-bc29-9be57f9be96f for instance with vm_state building and task_state spawning. [ 1858.000758] env[62684]: DEBUG nova.compute.manager [req-e8973080-38ed-4a1b-b4d1-3665591ad7b5 req-81720412-ec6a-42ce-8c82-df64e90ba2da service nova] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Received event network-changed-1333c708-96f6-4c98-bc29-9be57f9be96f {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1858.000982] env[62684]: DEBUG nova.compute.manager [req-e8973080-38ed-4a1b-b4d1-3665591ad7b5 req-81720412-ec6a-42ce-8c82-df64e90ba2da service nova] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Refreshing instance network info cache due to event network-changed-1333c708-96f6-4c98-bc29-9be57f9be96f. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1858.001354] env[62684]: DEBUG oslo_concurrency.lockutils [req-e8973080-38ed-4a1b-b4d1-3665591ad7b5 req-81720412-ec6a-42ce-8c82-df64e90ba2da service nova] Acquiring lock "refresh_cache-cfe219da-adf9-44b9-9df3-752ccf72a68b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1858.001545] env[62684]: DEBUG oslo_concurrency.lockutils [req-e8973080-38ed-4a1b-b4d1-3665591ad7b5 req-81720412-ec6a-42ce-8c82-df64e90ba2da service nova] Acquired lock "refresh_cache-cfe219da-adf9-44b9-9df3-752ccf72a68b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1858.001761] env[62684]: DEBUG nova.network.neutron [req-e8973080-38ed-4a1b-b4d1-3665591ad7b5 req-81720412-ec6a-42ce-8c82-df64e90ba2da service nova] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Refreshing network info cache for port 1333c708-96f6-4c98-bc29-9be57f9be96f {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1858.013225] env[62684]: DEBUG nova.scheduler.client.report [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1858.013568] env[62684]: DEBUG nova.compute.provider_tree [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 
tempest-ListServersNegativeTestJSON-942603561-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1858.038090] env[62684]: DEBUG oslo_vmware.api [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2052682, 'name': PowerOnVM_Task, 'duration_secs': 0.48839} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.039650] env[62684]: DEBUG nova.scheduler.client.report [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1858.042937] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1858.043700] env[62684]: INFO nova.compute.manager [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Took 6.83 seconds to spawn the instance on the hypervisor. [ 1858.043790] env[62684]: DEBUG nova.compute.manager [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1858.046066] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c2ce797-9d7a-44d9-91eb-477a2c6f24de {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.077925] env[62684]: DEBUG nova.scheduler.client.report [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1858.153658] env[62684]: DEBUG nova.compute.manager [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1858.171332] env[62684]: DEBUG nova.virt.hardware [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1858.172046] env[62684]: DEBUG nova.virt.hardware [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1858.172046] env[62684]: DEBUG nova.virt.hardware [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1858.172046] env[62684]: DEBUG nova.virt.hardware [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1858.172187] env[62684]: DEBUG nova.virt.hardware [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1858.172388] env[62684]: DEBUG nova.virt.hardware [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1858.173178] env[62684]: DEBUG nova.virt.hardware [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1858.173178] env[62684]: DEBUG nova.virt.hardware [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1858.173178] env[62684]: DEBUG nova.virt.hardware [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b 
tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1858.173178] env[62684]: DEBUG nova.virt.hardware [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1858.173385] env[62684]: DEBUG nova.virt.hardware [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1858.174997] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ca84a8-961d-45da-a1d2-a5aa786a849b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.185758] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b457103-b554-4193-9ca8-4978e49de3f7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.207637] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Instance VIF info [] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1858.215617] env[62684]: DEBUG oslo.service.loopingcall [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1858.220510] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1858.223267] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b69c0fb0-c8d2-49ff-ac03-2fdf8d0ba92d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.239013] env[62684]: DEBUG nova.compute.manager [req-a4b06efd-3875-44b3-a6c8-ac09066179d4 req-8fd76fcf-febe-442f-af86-6f9c6bd2f745 service nova] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Received event network-vif-deleted-fcd83399-5ab1-469a-9bbf-c150314f55dd {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1858.239316] env[62684]: INFO nova.compute.manager [req-a4b06efd-3875-44b3-a6c8-ac09066179d4 req-8fd76fcf-febe-442f-af86-6f9c6bd2f745 service nova] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Neutron deleted interface fcd83399-5ab1-469a-9bbf-c150314f55dd; detaching it from the instance and deleting it from the info cache [ 1858.239551] env[62684]: DEBUG nova.network.neutron [req-a4b06efd-3875-44b3-a6c8-ac09066179d4 req-8fd76fcf-febe-442f-af86-6f9c6bd2f745 service nova] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1858.256204] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1858.256204] env[62684]: value = "task-2052684" [ 1858.256204] env[62684]: _type = "Task" [ 1858.256204] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.265797] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "274d214a-4b92-4900-a66c-54baea2a68f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.266151] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "274d214a-4b92-4900-a66c-54baea2a68f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.281288] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052684, 'name': CreateVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.338147] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052683, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.575482] env[62684]: INFO nova.compute.manager [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Took 44.76 seconds to build instance. [ 1858.663981] env[62684]: DEBUG nova.network.neutron [-] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1858.688330] env[62684]: DEBUG oslo_concurrency.lockutils [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.701937] env[62684]: DEBUG nova.network.neutron [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Successfully updated port: be474f46-e2b0-4e78-af9e-c06c7e91756e {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1858.750567] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-842d7e4d-490a-4600-aca8-f4fa900a67bc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.769140] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b42f1fc6-8faf-44c7-ac27-883447663e92 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.784562] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56356426-36a2-4d2f-8bf8-827382c855c5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.793466] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052684, 'name': CreateVM_Task, 'duration_secs': 0.503948} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.795036] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1858.795490] env[62684]: DEBUG oslo_concurrency.lockutils [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1858.795658] env[62684]: DEBUG oslo_concurrency.lockutils [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1858.795989] env[62684]: DEBUG oslo_concurrency.lockutils [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1858.797133] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e83a4a-eba5-4068-a400-dba4e7be2470 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.812426] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbd86e61-5b57-43bb-9988-c9e5b9ae8e11 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.814463] env[62684]: DEBUG nova.compute.manager [req-a4b06efd-3875-44b3-a6c8-ac09066179d4 req-8fd76fcf-febe-442f-af86-6f9c6bd2f745 service nova] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Detach interface failed, port_id=fcd83399-5ab1-469a-9bbf-c150314f55dd, reason: Instance e08f8636-5193-40fa-972c-f0ecab193fc1 could not be found. 
{{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1858.856275] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6778f712-189b-4a90-beca-26277a60c058 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.860260] env[62684]: DEBUG nova.compute.manager [req-7fdaa302-2fe1-421d-b188-9dcc22eea922 req-8b1f22f2-4717-4c78-ab63-51b9e938db2c service nova] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Received event network-vif-plugged-be474f46-e2b0-4e78-af9e-c06c7e91756e {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1858.860488] env[62684]: DEBUG oslo_concurrency.lockutils [req-7fdaa302-2fe1-421d-b188-9dcc22eea922 req-8b1f22f2-4717-4c78-ab63-51b9e938db2c service nova] Acquiring lock "c6dc5401-f59e-4c18-9553-1240e2f49bce-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.860701] env[62684]: DEBUG oslo_concurrency.lockutils [req-7fdaa302-2fe1-421d-b188-9dcc22eea922 req-8b1f22f2-4717-4c78-ab63-51b9e938db2c service nova] Lock "c6dc5401-f59e-4c18-9553-1240e2f49bce-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.860868] env[62684]: DEBUG oslo_concurrency.lockutils [req-7fdaa302-2fe1-421d-b188-9dcc22eea922 req-8b1f22f2-4717-4c78-ab63-51b9e938db2c service nova] Lock "c6dc5401-f59e-4c18-9553-1240e2f49bce-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1858.861445] env[62684]: DEBUG nova.compute.manager [req-7fdaa302-2fe1-421d-b188-9dcc22eea922 req-8b1f22f2-4717-4c78-ab63-51b9e938db2c service nova] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] No waiting events found dispatching network-vif-plugged-be474f46-e2b0-4e78-af9e-c06c7e91756e {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1858.861445] env[62684]: WARNING nova.compute.manager [req-7fdaa302-2fe1-421d-b188-9dcc22eea922 req-8b1f22f2-4717-4c78-ab63-51b9e938db2c service nova] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Received unexpected event network-vif-plugged-be474f46-e2b0-4e78-af9e-c06c7e91756e for instance with vm_state building and task_state spawning. [ 1858.861594] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Waiting for the task: (returnval){ [ 1858.861594] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c436bc-e05b-0253-49e9-abcd1ab03d87" [ 1858.861594] env[62684]: _type = "Task" [ 1858.861594] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.872876] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052683, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.547844} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.875396] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] cfe219da-adf9-44b9-9df3-752ccf72a68b/cfe219da-adf9-44b9-9df3-752ccf72a68b.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1858.875396] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1858.875396] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7fc070b-31ee-4443-9597-17f0e1b76b96 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.883328] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4dcd0142-15b3-414e-811e-0b6f6c6dd04b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.886942] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c436bc-e05b-0253-49e9-abcd1ab03d87, 'name': SearchDatastore_Task, 'duration_secs': 0.015353} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.886942] env[62684]: DEBUG oslo_concurrency.lockutils [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1858.887243] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1858.887398] env[62684]: DEBUG oslo_concurrency.lockutils [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1858.887521] env[62684]: DEBUG oslo_concurrency.lockutils [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1858.887746] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1858.890033] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3e7915bd-b039-4554-98a0-e5f0ffa17db3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.901196] env[62684]: DEBUG nova.compute.provider_tree [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1858.906344] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1858.906344] env[62684]: value = "task-2052685" [ 1858.906344] env[62684]: _type = "Task" [ 1858.906344] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.911167] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1858.911396] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1858.915224] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72d1ef51-8157-4be9-9e27-d00fdaf5da6b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.918219] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052685, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.922802] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Waiting for the task: (returnval){ [ 1858.922802] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ed6713-af42-499e-b022-1196e3b4877f" [ 1858.922802] env[62684]: _type = "Task" [ 1858.922802] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.941927] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ed6713-af42-499e-b022-1196e3b4877f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.003653] env[62684]: DEBUG nova.network.neutron [req-e8973080-38ed-4a1b-b4d1-3665591ad7b5 req-81720412-ec6a-42ce-8c82-df64e90ba2da service nova] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Updated VIF entry in instance network info cache for port 1333c708-96f6-4c98-bc29-9be57f9be96f. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1859.003653] env[62684]: DEBUG nova.network.neutron [req-e8973080-38ed-4a1b-b4d1-3665591ad7b5 req-81720412-ec6a-42ce-8c82-df64e90ba2da service nova] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Updating instance_info_cache with network_info: [{"id": "1333c708-96f6-4c98-bc29-9be57f9be96f", "address": "fa:16:3e:45:eb:99", "network": {"id": "67d45e5a-c931-48e0-8be6-ad19f860ff6f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1665326946-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "843e3293347643789e54644c035332dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1333c708-96", "ovs_interfaceid": "1333c708-96f6-4c98-bc29-9be57f9be96f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1859.077896] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0546b2cb-7f1c-405f-abf5-d157da407489 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.839s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1859.079333] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 3.726s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1859.079684] env[62684]: INFO nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] During sync_power_state the instance has a pending task (spawning). Skip. [ 1859.079880] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1859.169910] env[62684]: INFO nova.compute.manager [-] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Took 1.42 seconds to deallocate network for instance. 
[ 1859.205315] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquiring lock "refresh_cache-c6dc5401-f59e-4c18-9553-1240e2f49bce" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1859.205583] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquired lock "refresh_cache-c6dc5401-f59e-4c18-9553-1240e2f49bce" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1859.205767] env[62684]: DEBUG nova.network.neutron [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1859.265433] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "e3dd1bc0-f292-4ac7-a8db-324887a18411" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1859.265666] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "e3dd1bc0-f292-4ac7-a8db-324887a18411" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1859.421017] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052685, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.110263} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1859.421440] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1859.422596] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd938df-d542-40eb-a3de-f5bb6cc83da2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.429286] env[62684]: ERROR nova.scheduler.client.report [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [req-28f61450-152d-424c-91fa-c1615f322b4a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-28f61450-152d-424c-91fa-c1615f322b4a"}]} [ 1859.450299] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] cfe219da-adf9-44b9-9df3-752ccf72a68b/cfe219da-adf9-44b9-9df3-752ccf72a68b.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1859.455493] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-92012495-78c7-439e-acab-dc0ac96d40b2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.468138] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ed6713-af42-499e-b022-1196e3b4877f, 'name': SearchDatastore_Task, 'duration_secs': 0.013183} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1859.469307] env[62684]: DEBUG nova.scheduler.client.report [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1859.472431] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13b48641-0da8-43f3-81cb-655b2a518d9d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.477548] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1859.477548] env[62684]: value = "task-2052687" [ 1859.477548] env[62684]: _type = "Task" [ 1859.477548] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1859.478817] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Waiting for the task: (returnval){ [ 1859.478817] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520b9d29-dec3-9632-f92d-bea6f7249040" [ 1859.478817] env[62684]: _type = "Task" [ 1859.478817] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1859.486621] env[62684]: DEBUG nova.scheduler.client.report [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1859.486868] env[62684]: DEBUG nova.compute.provider_tree [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1859.494785] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052687, 'name': 
ReconfigVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.495152] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520b9d29-dec3-9632-f92d-bea6f7249040, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.507016] env[62684]: DEBUG oslo_concurrency.lockutils [req-e8973080-38ed-4a1b-b4d1-3665591ad7b5 req-81720412-ec6a-42ce-8c82-df64e90ba2da service nova] Releasing lock "refresh_cache-cfe219da-adf9-44b9-9df3-752ccf72a68b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1859.508434] env[62684]: DEBUG nova.scheduler.client.report [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1859.531184] env[62684]: DEBUG nova.scheduler.client.report [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1859.581805] env[62684]: DEBUG nova.compute.manager [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1859.680210] env[62684]: DEBUG oslo_concurrency.lockutils [None req-091a64e3-3926-4ff2-8ba5-0483d14d969c tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1859.765617] env[62684]: DEBUG nova.network.neutron [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1859.975043] env[62684]: DEBUG nova.network.neutron [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Updating instance_info_cache with network_info: [{"id": "be474f46-e2b0-4e78-af9e-c06c7e91756e", "address": "fa:16:3e:87:25:bd", "network": {"id": "67d45e5a-c931-48e0-8be6-ad19f860ff6f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1665326946-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "843e3293347643789e54644c035332dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe474f46-e2", "ovs_interfaceid": "be474f46-e2b0-4e78-af9e-c06c7e91756e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1860.002951] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052687, 'name': ReconfigVM_Task, 'duration_secs': 0.256003} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1860.008701] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Reconfigured VM instance instance-00000026 to attach disk [datastore1] cfe219da-adf9-44b9-9df3-752ccf72a68b/cfe219da-adf9-44b9-9df3-752ccf72a68b.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1860.010376] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520b9d29-dec3-9632-f92d-bea6f7249040, 'name': SearchDatastore_Task, 'duration_secs': 0.026952} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1860.010625] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-385bd424-2314-40d6-b39c-db4769eacf13 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.012471] env[62684]: DEBUG oslo_concurrency.lockutils [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1860.012746] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] e2a9ab56-bde3-40b6-a214-19c77a9c6778/e2a9ab56-bde3-40b6-a214-19c77a9c6778.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1860.015909] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d344d7ce-5707-4b4a-9582-fb6138bf9d88 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.025840] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1860.025840] env[62684]: value = "task-2052688" [ 1860.025840] env[62684]: _type = "Task" [ 1860.025840] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.026452] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Waiting for the task: (returnval){ [ 1860.026452] env[62684]: value = "task-2052689" [ 1860.026452] env[62684]: _type = "Task" [ 1860.026452] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.048255] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Task: {'id': task-2052689, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.049012] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052688, 'name': Rename_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.114614] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1860.155717] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a119684-0e3f-4fda-b7b2-3fb106c2af62 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.165410] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbe845df-43de-4f3b-a29c-29bf44977b32 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.198553] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7dbb937-0331-4e72-9349-d4ac0cb4efcf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.207162] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c6d66e-4ba3-4118-96e7-8453de85b384 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.221102] env[62684]: DEBUG nova.compute.provider_tree [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1860.428013] env[62684]: DEBUG oslo_concurrency.lockutils [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Acquiring lock "a9dfeb4d-a92e-41cf-9d2f-43086cc9e868" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1860.428364] env[62684]: DEBUG oslo_concurrency.lockutils [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Lock "a9dfeb4d-a92e-41cf-9d2f-43086cc9e868" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1860.428589] env[62684]: DEBUG oslo_concurrency.lockutils [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Acquiring lock "a9dfeb4d-a92e-41cf-9d2f-43086cc9e868-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1860.428781] env[62684]: DEBUG oslo_concurrency.lockutils [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 
tempest-ListImageFiltersTestJSON-1064551173-project-member] Lock "a9dfeb4d-a92e-41cf-9d2f-43086cc9e868-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1860.428956] env[62684]: DEBUG oslo_concurrency.lockutils [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Lock "a9dfeb4d-a92e-41cf-9d2f-43086cc9e868-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1860.431126] env[62684]: INFO nova.compute.manager [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Terminating instance [ 1860.434983] env[62684]: DEBUG nova.compute.manager [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1860.435111] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1860.436032] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34be4bed-1b5e-4a9b-8f79-264097f66d4e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.443817] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1860.444075] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1194d02b-b0d7-4470-bd5d-db6e4fe97cfb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.451012] env[62684]: DEBUG oslo_vmware.api [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1860.451012] env[62684]: value = "task-2052690" [ 1860.451012] env[62684]: _type = "Task" [ 1860.451012] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.459435] env[62684]: DEBUG oslo_vmware.api [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052690, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.477881] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Releasing lock "refresh_cache-c6dc5401-f59e-4c18-9553-1240e2f49bce" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1860.478052] env[62684]: DEBUG nova.compute.manager [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Instance network_info: |[{"id": "be474f46-e2b0-4e78-af9e-c06c7e91756e", "address": "fa:16:3e:87:25:bd", "network": {"id": "67d45e5a-c931-48e0-8be6-ad19f860ff6f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1665326946-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "843e3293347643789e54644c035332dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe474f46-e2", "ovs_interfaceid": "be474f46-e2b0-4e78-af9e-c06c7e91756e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1860.479304] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:25:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92cdccfd-4b10-4024-b724-5f22792dd4de', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'be474f46-e2b0-4e78-af9e-c06c7e91756e', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1860.487721] env[62684]: DEBUG oslo.service.loopingcall [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1860.487721] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1860.487721] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-419bc672-fc4d-4690-b879-830f69ece7f5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.510326] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1860.510326] env[62684]: value = "task-2052691" [ 1860.510326] env[62684]: _type = "Task" [ 1860.510326] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.518266] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052691, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.540319] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Task: {'id': task-2052689, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.544954] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052688, 'name': Rename_Task, 'duration_secs': 0.135304} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1860.545382] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1860.545777] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea496d10-c269-497a-905d-a9a78bcd64d6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.554440] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1860.554440] env[62684]: value = "task-2052692" [ 1860.554440] env[62684]: _type = "Task" [ 1860.554440] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.565011] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052692, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.725042] env[62684]: DEBUG nova.scheduler.client.report [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1860.871908] env[62684]: DEBUG nova.compute.manager [req-3c9a1b72-6bb7-4209-bdb6-8f63f345f7d2 req-861e3fe7-1f1c-4a65-807b-d44f11d405a6 service nova] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Received event network-changed-be474f46-e2b0-4e78-af9e-c06c7e91756e {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1860.872815] env[62684]: DEBUG nova.compute.manager [req-3c9a1b72-6bb7-4209-bdb6-8f63f345f7d2 req-861e3fe7-1f1c-4a65-807b-d44f11d405a6 service nova] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Refreshing instance network info cache due to event network-changed-be474f46-e2b0-4e78-af9e-c06c7e91756e. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1860.872815] env[62684]: DEBUG oslo_concurrency.lockutils [req-3c9a1b72-6bb7-4209-bdb6-8f63f345f7d2 req-861e3fe7-1f1c-4a65-807b-d44f11d405a6 service nova] Acquiring lock "refresh_cache-c6dc5401-f59e-4c18-9553-1240e2f49bce" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1860.872815] env[62684]: DEBUG oslo_concurrency.lockutils [req-3c9a1b72-6bb7-4209-bdb6-8f63f345f7d2 req-861e3fe7-1f1c-4a65-807b-d44f11d405a6 service nova] Acquired lock "refresh_cache-c6dc5401-f59e-4c18-9553-1240e2f49bce" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1860.872815] env[62684]: DEBUG nova.network.neutron [req-3c9a1b72-6bb7-4209-bdb6-8f63f345f7d2 req-861e3fe7-1f1c-4a65-807b-d44f11d405a6 service nova] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Refreshing network info cache for port be474f46-e2b0-4e78-af9e-c06c7e91756e {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1860.968487] env[62684]: DEBUG oslo_vmware.api [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052690, 'name': PowerOffVM_Task, 'duration_secs': 0.275506} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1860.968926] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1860.969146] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1860.969507] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c63e9b9-49b9-41dd-bf76-b3d15f657833 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.020969] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052691, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.037167] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Task: {'id': task-2052689, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.831761} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.037435] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] e2a9ab56-bde3-40b6-a214-19c77a9c6778/e2a9ab56-bde3-40b6-a214-19c77a9c6778.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1861.037762] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1861.038083] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fc43badc-a90b-4dda-aff7-44c67ab9d557 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.046620] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Waiting for the task: (returnval){ [ 1861.046620] env[62684]: value = "task-2052694" [ 1861.046620] env[62684]: _type = "Task" [ 1861.046620] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.064903] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Task: {'id': task-2052694, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.067847] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1861.067987] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1861.068149] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Deleting the datastore file [datastore2] a9dfeb4d-a92e-41cf-9d2f-43086cc9e868 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1861.068419] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bbc95923-af7a-4fcb-a857-73065ab17f39 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.074687] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052692, 'name': PowerOnVM_Task, 'duration_secs': 0.513144} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.075374] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1861.075608] env[62684]: INFO nova.compute.manager [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Took 7.25 seconds to spawn the instance on the hypervisor. 
[ 1861.075835] env[62684]: DEBUG nova.compute.manager [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1861.076701] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f6088b-de06-48f4-8c70-ff7b340c2418 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.081727] env[62684]: DEBUG oslo_vmware.api [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1861.081727] env[62684]: value = "task-2052695" [ 1861.081727] env[62684]: _type = "Task" [ 1861.081727] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.095138] env[62684]: DEBUG oslo_vmware.api [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052695, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.231345] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.467s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1861.231819] env[62684]: DEBUG nova.compute.manager [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1861.236118] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.944s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1861.237731] env[62684]: INFO nova.compute.claims [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1861.267631] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-1254983d-cf7e-4507-b977-0c428994dfb4 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Volume attach. 
Driver type: vmdk {{(pid=62684) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1861.268236] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-1254983d-cf7e-4507-b977-0c428994dfb4 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421237', 'volume_id': '42560db2-1a6f-4d32-878e-bb36627ec4d0', 'name': 'volume-42560db2-1a6f-4d32-878e-bb36627ec4d0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd532b5fa-90a3-4f25-8684-4eabaa432c86', 'attached_at': '', 'detached_at': '', 'volume_id': '42560db2-1a6f-4d32-878e-bb36627ec4d0', 'serial': '42560db2-1a6f-4d32-878e-bb36627ec4d0'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1861.268908] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49eed2cf-fa73-4e7c-b2d0-77fce30e1852 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.287680] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a23e0d9-2b73-4fd1-804e-b06be6619074 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.314853] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-1254983d-cf7e-4507-b977-0c428994dfb4 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] volume-42560db2-1a6f-4d32-878e-bb36627ec4d0/volume-42560db2-1a6f-4d32-878e-bb36627ec4d0.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1861.315206] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44b625f1-e821-4c37-be3d-1868999273ef {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.334895] env[62684]: DEBUG oslo_vmware.api [None req-1254983d-cf7e-4507-b977-0c428994dfb4 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] Waiting for the task: (returnval){ [ 1861.334895] env[62684]: value = "task-2052696" [ 1861.334895] env[62684]: _type = "Task" [ 1861.334895] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.343031] env[62684]: DEBUG oslo_vmware.api [None req-1254983d-cf7e-4507-b977-0c428994dfb4 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] Task: {'id': task-2052696, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.520856] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052691, 'name': CreateVM_Task, 'duration_secs': 0.668428} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.522274] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1861.523181] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1861.523251] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1861.523657] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1861.523932] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2453db34-0b73-46e7-85ee-3fd56c282410 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.529759] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1861.529759] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52408fca-b6d3-9d0b-8930-942684bdf6c4" [ 1861.529759] env[62684]: _type = "Task" [ 1861.529759] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.540035] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52408fca-b6d3-9d0b-8930-942684bdf6c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.561598] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Task: {'id': task-2052694, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072626} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.564634] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1861.565542] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f173934-4ed0-4fa6-b9df-55230fcee73b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.586181] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] e2a9ab56-bde3-40b6-a214-19c77a9c6778/e2a9ab56-bde3-40b6-a214-19c77a9c6778.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1861.586581] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-15c7426c-70ef-4f25-90b9-e70d63dfa687 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.611602] env[62684]: INFO nova.compute.manager [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Took 44.83 seconds to build instance. [ 1861.615823] env[62684]: DEBUG oslo_vmware.api [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052695, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.413197} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.617035] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1861.617227] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1861.617405] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1861.617573] env[62684]: INFO nova.compute.manager [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1861.617817] env[62684]: DEBUG oslo.service.loopingcall [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1861.618097] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Waiting for the task: (returnval){ [ 1861.618097] env[62684]: value = "task-2052697" [ 1861.618097] env[62684]: _type = "Task" [ 1861.618097] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.618286] env[62684]: DEBUG nova.compute.manager [-] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1861.618378] env[62684]: DEBUG nova.network.neutron [-] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1861.629785] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Task: {'id': task-2052697, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.658257] env[62684]: DEBUG nova.network.neutron [req-3c9a1b72-6bb7-4209-bdb6-8f63f345f7d2 req-861e3fe7-1f1c-4a65-807b-d44f11d405a6 service nova] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Updated VIF entry in instance network info cache for port be474f46-e2b0-4e78-af9e-c06c7e91756e. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1861.658633] env[62684]: DEBUG nova.network.neutron [req-3c9a1b72-6bb7-4209-bdb6-8f63f345f7d2 req-861e3fe7-1f1c-4a65-807b-d44f11d405a6 service nova] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Updating instance_info_cache with network_info: [{"id": "be474f46-e2b0-4e78-af9e-c06c7e91756e", "address": "fa:16:3e:87:25:bd", "network": {"id": "67d45e5a-c931-48e0-8be6-ad19f860ff6f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1665326946-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "843e3293347643789e54644c035332dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe474f46-e2", "ovs_interfaceid": "be474f46-e2b0-4e78-af9e-c06c7e91756e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1861.742867] env[62684]: DEBUG nova.compute.utils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1861.747873] env[62684]: DEBUG nova.compute.manager [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1861.748198] env[62684]: DEBUG nova.network.neutron [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1861.794880] env[62684]: DEBUG nova.policy [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a592492907b44d1bdf390c83fd54177', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '843e3293347643789e54644c035332dc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1861.849655] env[62684]: DEBUG oslo_vmware.api [None req-1254983d-cf7e-4507-b977-0c428994dfb4 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] Task: {'id': task-2052696, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.039671] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52408fca-b6d3-9d0b-8930-942684bdf6c4, 'name': SearchDatastore_Task, 'duration_secs': 0.028059} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.039974] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1862.040246] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1862.040481] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1862.040631] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1862.040810] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1862.041086] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b76fbab6-0aa6-429b-9bf4-e18e48b20b87 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.057800] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1862.057991] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1862.059050] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d87d477-6681-41a4-82de-2296e1c987a9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.066949] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1862.066949] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523993f2-5e55-bfd5-5b29-3ad36baf13d9" [ 1862.066949] env[62684]: _type = "Task" [ 1862.066949] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.075685] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523993f2-5e55-bfd5-5b29-3ad36baf13d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.114767] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "cfe219da-adf9-44b9-9df3-752ccf72a68b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.148s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1862.119490] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "cfe219da-adf9-44b9-9df3-752ccf72a68b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 6.763s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1862.119490] env[62684]: INFO nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] During sync_power_state the instance has a pending task (spawning). Skip. [ 1862.119490] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "cfe219da-adf9-44b9-9df3-752ccf72a68b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1862.131961] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Task: {'id': task-2052697, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.162379] env[62684]: DEBUG oslo_concurrency.lockutils [req-3c9a1b72-6bb7-4209-bdb6-8f63f345f7d2 req-861e3fe7-1f1c-4a65-807b-d44f11d405a6 service nova] Releasing lock "refresh_cache-c6dc5401-f59e-4c18-9553-1240e2f49bce" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1862.162619] env[62684]: DEBUG nova.compute.manager [req-3c9a1b72-6bb7-4209-bdb6-8f63f345f7d2 req-861e3fe7-1f1c-4a65-807b-d44f11d405a6 service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Received event network-changed-8be48385-37eb-4c2e-baf8-404a9aad87de {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1862.162776] env[62684]: DEBUG nova.compute.manager [req-3c9a1b72-6bb7-4209-bdb6-8f63f345f7d2 req-861e3fe7-1f1c-4a65-807b-d44f11d405a6 service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Refreshing instance network info cache due to event network-changed-8be48385-37eb-4c2e-baf8-404a9aad87de. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1862.163047] env[62684]: DEBUG oslo_concurrency.lockutils [req-3c9a1b72-6bb7-4209-bdb6-8f63f345f7d2 req-861e3fe7-1f1c-4a65-807b-d44f11d405a6 service nova] Acquiring lock "refresh_cache-b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1862.163154] env[62684]: DEBUG oslo_concurrency.lockutils [req-3c9a1b72-6bb7-4209-bdb6-8f63f345f7d2 req-861e3fe7-1f1c-4a65-807b-d44f11d405a6 service nova] Acquired lock "refresh_cache-b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1862.163539] env[62684]: DEBUG nova.network.neutron [req-3c9a1b72-6bb7-4209-bdb6-8f63f345f7d2 req-861e3fe7-1f1c-4a65-807b-d44f11d405a6 service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Refreshing network info cache for port 8be48385-37eb-4c2e-baf8-404a9aad87de {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1862.223133] env[62684]: DEBUG nova.network.neutron [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Successfully created port: c4406072-51a1-483b-89d6-d1b7ed992955 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1862.252438] env[62684]: DEBUG nova.compute.manager [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1862.351484] env[62684]: DEBUG oslo_vmware.api [None req-1254983d-cf7e-4507-b977-0c428994dfb4 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] Task: {'id': task-2052696, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.514012] env[62684]: DEBUG nova.network.neutron [-] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1862.579096] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523993f2-5e55-bfd5-5b29-3ad36baf13d9, 'name': SearchDatastore_Task, 'duration_secs': 0.015441} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.579909] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-657fd886-8faf-47b5-bac8-20c5dbdbc866 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.585749] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1862.585749] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a8a3aa-630c-af32-337c-8f5fb3e0e3d9" [ 1862.585749] env[62684]: _type = "Task" [ 1862.585749] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.595456] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a8a3aa-630c-af32-337c-8f5fb3e0e3d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.618968] env[62684]: DEBUG nova.compute.manager [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1862.632878] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Task: {'id': task-2052697, 'name': ReconfigVM_Task, 'duration_secs': 0.868509} completed successfully. 
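Each "Waiting for the task ... to complete" record above is followed by periodic _poll_task entries reporting progress until the vCenter task finishes. The following is a generic polling loop in that spirit only; oslo.vmware's real wait_for_task is more involved (retries, fault translation), and poll_progress here is a hypothetical callable, not a library API.

    # Generic task-polling sketch patterned on the wait_for_task /
    # _poll_task lines above; not oslo.vmware's implementation.
    import time

    def wait_for_task(poll_progress, interval=0.5, timeout=300.0):
        # poll_progress() is assumed to return (state, percent), with
        # state one of "running", "success" or "error".
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, percent = poll_progress()
            print(f"progress is {percent}%")
            if state == "success":
                return
            if state == "error":
                raise RuntimeError("task failed")
            time.sleep(interval)
        raise TimeoutError("task did not complete in time")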
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.633211] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Reconfigured VM instance instance-00000023 to attach disk [datastore1] e2a9ab56-bde3-40b6-a214-19c77a9c6778/e2a9ab56-bde3-40b6-a214-19c77a9c6778.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1862.633848] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7915f887-d90c-4dda-a087-8c68a98af441 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.642218] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Waiting for the task: (returnval){ [ 1862.642218] env[62684]: value = "task-2052698" [ 1862.642218] env[62684]: _type = "Task" [ 1862.642218] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.649323] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Task: {'id': task-2052698, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.777175] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f62dbd96-b5b5-45e3-b904-5cb42eab7840 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.784859] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eebe54fc-349e-446e-92dc-f7c79542a291 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.818102] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b56a07-8922-4d44-9dbf-8024c968495b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.827908] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-813daa0f-deb3-4eb9-b79f-5db9a073d0b4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.843228] env[62684]: DEBUG nova.compute.provider_tree [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1862.852659] env[62684]: DEBUG 
oslo_vmware.api [None req-1254983d-cf7e-4507-b977-0c428994dfb4 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] Task: {'id': task-2052696, 'name': ReconfigVM_Task, 'duration_secs': 1.207272} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.852930] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-1254983d-cf7e-4507-b977-0c428994dfb4 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Reconfigured VM instance instance-0000000c to attach disk [datastore1] volume-42560db2-1a6f-4d32-878e-bb36627ec4d0/volume-42560db2-1a6f-4d32-878e-bb36627ec4d0.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1862.857919] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eca8bd93-b670-45d0-ab1e-ee3316148dce {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.874563] env[62684]: DEBUG oslo_vmware.api [None req-1254983d-cf7e-4507-b977-0c428994dfb4 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] Waiting for the task: (returnval){ [ 1862.874563] env[62684]: value = "task-2052699" [ 1862.874563] env[62684]: _type = "Task" [ 1862.874563] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.883622] env[62684]: DEBUG oslo_vmware.api [None req-1254983d-cf7e-4507-b977-0c428994dfb4 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] Task: {'id': task-2052699, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.894806] env[62684]: DEBUG nova.compute.manager [req-2a8a5c97-de36-45da-9803-d406ca9029bd req-f6cd9f63-ea05-4228-b7a8-857741e90e24 service nova] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Received event network-vif-deleted-aa71af47-855f-4fc6-9a8d-ca724cde3d12 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1862.909431] env[62684]: DEBUG nova.network.neutron [req-3c9a1b72-6bb7-4209-bdb6-8f63f345f7d2 req-861e3fe7-1f1c-4a65-807b-d44f11d405a6 service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Updated VIF entry in instance network info cache for port 8be48385-37eb-4c2e-baf8-404a9aad87de. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1862.909810] env[62684]: DEBUG nova.network.neutron [req-3c9a1b72-6bb7-4209-bdb6-8f63f345f7d2 req-861e3fe7-1f1c-4a65-807b-d44f11d405a6 service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Updating instance_info_cache with network_info: [{"id": "8be48385-37eb-4c2e-baf8-404a9aad87de", "address": "fa:16:3e:33:7c:80", "network": {"id": "64494ea7-f6d9-430c-8ac7-e876e763004b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2056829508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e57b232a7e7647c7a3b2bca3c096feb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8be48385-37", "ovs_interfaceid": "8be48385-37eb-4c2e-baf8-404a9aad87de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1863.016413] env[62684]: INFO nova.compute.manager [-] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Took 1.40 seconds to deallocate network for instance. [ 1863.097195] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a8a3aa-630c-af32-337c-8f5fb3e0e3d9, 'name': SearchDatastore_Task, 'duration_secs': 0.047124} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.097486] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1863.097761] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] c6dc5401-f59e-4c18-9553-1240e2f49bce/c6dc5401-f59e-4c18-9553-1240e2f49bce.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1863.098042] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4eb473dc-0c10-452a-b73c-00505629a0b9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.105050] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1863.105050] env[62684]: value = "task-2052700" [ 1863.105050] env[62684]: _type = "Task" [ 1863.105050] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.112803] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052700, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.141562] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1863.151823] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Task: {'id': task-2052698, 'name': Rename_Task, 'duration_secs': 0.325576} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.152546] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1863.152546] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3e51eb54-9c23-423b-b8ef-41e5a4fb26fc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.160134] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Waiting for the task: (returnval){ [ 1863.160134] env[62684]: value = "task-2052701" [ 1863.160134] env[62684]: _type = "Task" [ 1863.160134] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.165852] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Task: {'id': task-2052701, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.264193] env[62684]: DEBUG nova.compute.manager [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1863.291077] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1863.291366] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1863.291539] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1863.291721] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1863.291874] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1863.292042] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1863.292270] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1863.292436] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1863.292607] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1863.292779] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1863.292963] env[62684]: DEBUG nova.virt.hardware [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1863.293931] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d31e686d-26a1-4bb9-8e7f-2ef3d79b1362 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.301735] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb1e631-648e-4d36-9403-bc6147c2f335 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.065301] env[62684]: DEBUG oslo_concurrency.lockutils [req-3c9a1b72-6bb7-4209-bdb6-8f63f345f7d2 req-861e3fe7-1f1c-4a65-807b-d44f11d405a6 service nova] Releasing lock "refresh_cache-b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1864.066285] env[62684]: DEBUG oslo_concurrency.lockutils [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.067781] env[62684]: DEBUG nova.network.neutron [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Successfully updated port: c4406072-51a1-483b-89d6-d1b7ed992955 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1864.072251] env[62684]: DEBUG nova.compute.manager [req-c719bad5-3dea-40a3-9ec4-a061879632c7 req-0ac14008-94e7-4ad3-a56c-f51e757df9ff service nova] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Received event network-vif-plugged-c4406072-51a1-483b-89d6-d1b7ed992955 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1864.072448] env[62684]: DEBUG oslo_concurrency.lockutils [req-c719bad5-3dea-40a3-9ec4-a061879632c7 req-0ac14008-94e7-4ad3-a56c-f51e757df9ff service nova] Acquiring lock "5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.072653] 
env[62684]: DEBUG oslo_concurrency.lockutils [req-c719bad5-3dea-40a3-9ec4-a061879632c7 req-0ac14008-94e7-4ad3-a56c-f51e757df9ff service nova] Lock "5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1864.072819] env[62684]: DEBUG oslo_concurrency.lockutils [req-c719bad5-3dea-40a3-9ec4-a061879632c7 req-0ac14008-94e7-4ad3-a56c-f51e757df9ff service nova] Lock "5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1864.072985] env[62684]: DEBUG nova.compute.manager [req-c719bad5-3dea-40a3-9ec4-a061879632c7 req-0ac14008-94e7-4ad3-a56c-f51e757df9ff service nova] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] No waiting events found dispatching network-vif-plugged-c4406072-51a1-483b-89d6-d1b7ed992955 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1864.073163] env[62684]: WARNING nova.compute.manager [req-c719bad5-3dea-40a3-9ec4-a061879632c7 req-0ac14008-94e7-4ad3-a56c-f51e757df9ff service nova] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Received unexpected event network-vif-plugged-c4406072-51a1-483b-89d6-d1b7ed992955 for instance with vm_state building and task_state spawning. [ 1864.080258] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquiring lock "refresh_cache-5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1864.080386] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquired lock "refresh_cache-5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1864.080527] env[62684]: DEBUG nova.network.neutron [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1864.089023] env[62684]: DEBUG oslo_vmware.api [None req-1254983d-cf7e-4507-b977-0c428994dfb4 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] Task: {'id': task-2052699, 'name': ReconfigVM_Task, 'duration_secs': 0.156141} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.093310] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-1254983d-cf7e-4507-b977-0c428994dfb4 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421237', 'volume_id': '42560db2-1a6f-4d32-878e-bb36627ec4d0', 'name': 'volume-42560db2-1a6f-4d32-878e-bb36627ec4d0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd532b5fa-90a3-4f25-8684-4eabaa432c86', 'attached_at': '', 'detached_at': '', 'volume_id': '42560db2-1a6f-4d32-878e-bb36627ec4d0', 'serial': '42560db2-1a6f-4d32-878e-bb36627ec4d0'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1864.094718] env[62684]: DEBUG oslo_vmware.api [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Task: {'id': task-2052701, 'name': PowerOnVM_Task, 'duration_secs': 0.570865} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.095570] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052700, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.676821} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.096536] env[62684]: DEBUG nova.scheduler.client.report [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 63 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1864.096768] env[62684]: DEBUG nova.compute.provider_tree [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 63 to 64 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1864.096947] env[62684]: DEBUG nova.compute.provider_tree [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1864.103021] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1864.103021] env[62684]: DEBUG nova.compute.manager [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1864.103021] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] c6dc5401-f59e-4c18-9553-1240e2f49bce/c6dc5401-f59e-4c18-9553-1240e2f49bce.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1864.103021] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1864.103021] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a8a0bf5-4dcf-4149-a3f2-cc699182cfea {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.104713] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f4b60ac2-d28b-43a3-94b8-77f109cb95ec {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.114981] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1864.114981] env[62684]: value = "task-2052702" [ 1864.114981] env[62684]: _type = "Task" [ 1864.114981] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.124682] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052702, 'name': ExtendVirtualDisk_Task} progress is 0%. 
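The inventory reported to Placement above gives, per resource class, a total, a reserved amount and an allocation ratio; the capacity the scheduler can actually consume is (total - reserved) * allocation_ratio. Worked out with the figures from the log for provider c23c281e-ec1f-4876-972e-a98655f2084f:

    # Capacity math for the inventory logged above.
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0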
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.602559] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.366s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1864.603000] env[62684]: DEBUG nova.compute.manager [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1864.606742] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.483s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1864.607020] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1864.608969] env[62684]: DEBUG oslo_concurrency.lockutils [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.075s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1864.609217] env[62684]: DEBUG nova.objects.instance [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lazy-loading 'resources' on Instance uuid 6d4061e4-a074-445d-95c5-239014ee87f3 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1864.626668] env[62684]: DEBUG oslo_concurrency.lockutils [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.633224] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052702, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066519} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.635136] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1864.636633] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1efbdc90-ef10-4510-a1c9-724d777c48df {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.639528] env[62684]: DEBUG nova.network.neutron [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1864.662051] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] c6dc5401-f59e-4c18-9553-1240e2f49bce/c6dc5401-f59e-4c18-9553-1240e2f49bce.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1864.666040] env[62684]: INFO nova.scheduler.client.report [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Deleted allocations for instance 3a172e9f-9f79-489e-9571-80bd74ad8609 [ 1864.667019] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ec87d66-4295-40a6-84cc-4fc3fe751e50 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.692128] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1864.692128] env[62684]: value = "task-2052703" [ 1864.692128] env[62684]: _type = "Task" [ 1864.692128] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.702394] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052703, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.828355] env[62684]: DEBUG nova.network.neutron [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Updating instance_info_cache with network_info: [{"id": "c4406072-51a1-483b-89d6-d1b7ed992955", "address": "fa:16:3e:89:98:2b", "network": {"id": "67d45e5a-c931-48e0-8be6-ad19f860ff6f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1665326946-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "843e3293347643789e54644c035332dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4406072-51", "ovs_interfaceid": "c4406072-51a1-483b-89d6-d1b7ed992955", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1865.109035] env[62684]: DEBUG nova.compute.utils [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1865.110589] env[62684]: DEBUG nova.compute.manager [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Allocating IP information in the background. 
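The instance_info_cache updates above carry the full network_info structure as a list of VIF dicts (port id, MAC, bridge, subnets with fixed and floating IPs, binding details). A short sketch of walking one of those entries to collect addresses follows; Nova itself wraps this data in model classes rather than reading raw dicts, so this is illustrative only.

    # Collects fixed and floating addresses from a network_info list of
    # the shape logged above; illustrative, not Nova's NetworkInfo model.
    def list_addresses(network_info):
        addresses = []
        for vif in network_info:
            for subnet in vif["network"]["subnets"]:
                for ip in subnet["ips"]:
                    addresses.append(ip["address"])
                    for fip in ip.get("floating_ips", []):
                        addresses.append(fip["address"])
        return addresses

    # For the entry logged for port c4406072-51a1-483b-89d6-d1b7ed992955
    # this yields ["192.168.128.3"]; the b4cd871a entry above also
    # carries the floating address 10.180.180.249.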
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1865.110754] env[62684]: DEBUG nova.network.neutron [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1865.136873] env[62684]: DEBUG nova.objects.instance [None req-1254983d-cf7e-4507-b977-0c428994dfb4 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] Lazy-loading 'flavor' on Instance uuid d532b5fa-90a3-4f25-8684-4eabaa432c86 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1865.178802] env[62684]: DEBUG nova.policy [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b6ff6082c4844de797e8aee5e8bd43fd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '540d70f4b6274c38a5e79c00e389d8fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1865.190537] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b79826dc-dd2b-41dc-933a-2141b6a4d834 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Lock "3a172e9f-9f79-489e-9571-80bd74ad8609" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.015s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1865.192121] env[62684]: DEBUG oslo_concurrency.lockutils [None req-72150c9f-0895-4b84-806f-aa4d16d5d795 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Lock "3a172e9f-9f79-489e-9571-80bd74ad8609" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 29.829s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1865.192121] env[62684]: DEBUG oslo_concurrency.lockutils [None req-72150c9f-0895-4b84-806f-aa4d16d5d795 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Acquiring lock "3a172e9f-9f79-489e-9571-80bd74ad8609-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1865.192121] env[62684]: DEBUG oslo_concurrency.lockutils [None req-72150c9f-0895-4b84-806f-aa4d16d5d795 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Lock "3a172e9f-9f79-489e-9571-80bd74ad8609-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1865.192121] env[62684]: DEBUG oslo_concurrency.lockutils [None 
req-72150c9f-0895-4b84-806f-aa4d16d5d795 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Lock "3a172e9f-9f79-489e-9571-80bd74ad8609-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1865.196595] env[62684]: INFO nova.compute.manager [None req-72150c9f-0895-4b84-806f-aa4d16d5d795 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Terminating instance [ 1865.202897] env[62684]: DEBUG oslo_concurrency.lockutils [None req-72150c9f-0895-4b84-806f-aa4d16d5d795 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Acquiring lock "refresh_cache-3a172e9f-9f79-489e-9571-80bd74ad8609" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1865.203359] env[62684]: DEBUG oslo_concurrency.lockutils [None req-72150c9f-0895-4b84-806f-aa4d16d5d795 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Acquired lock "refresh_cache-3a172e9f-9f79-489e-9571-80bd74ad8609" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1865.203615] env[62684]: DEBUG nova.network.neutron [None req-72150c9f-0895-4b84-806f-aa4d16d5d795 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1865.211484] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052703, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.333152] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Releasing lock "refresh_cache-5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1865.333493] env[62684]: DEBUG nova.compute.manager [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Instance network_info: |[{"id": "c4406072-51a1-483b-89d6-d1b7ed992955", "address": "fa:16:3e:89:98:2b", "network": {"id": "67d45e5a-c931-48e0-8be6-ad19f860ff6f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1665326946-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "843e3293347643789e54644c035332dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4406072-51", "ovs_interfaceid": "c4406072-51a1-483b-89d6-d1b7ed992955", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1865.334475] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:89:98:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92cdccfd-4b10-4024-b724-5f22792dd4de', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c4406072-51a1-483b-89d6-d1b7ed992955', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1865.345139] env[62684]: DEBUG oslo.service.loopingcall [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1865.347070] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1865.347868] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eccf93cf-a300-44a3-955c-56a072908c98 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.371512] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1865.371512] env[62684]: value = "task-2052704" [ 1865.371512] env[62684]: _type = "Task" [ 1865.371512] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.382512] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052704, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.477229] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Acquiring lock "e2a9ab56-bde3-40b6-a214-19c77a9c6778" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1865.477619] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Lock "e2a9ab56-bde3-40b6-a214-19c77a9c6778" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1865.477724] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Acquiring lock "e2a9ab56-bde3-40b6-a214-19c77a9c6778-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1865.477902] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Lock "e2a9ab56-bde3-40b6-a214-19c77a9c6778-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1865.478095] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Lock "e2a9ab56-bde3-40b6-a214-19c77a9c6778-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1865.482666] env[62684]: INFO nova.compute.manager [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 
tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Terminating instance [ 1865.484600] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Acquiring lock "refresh_cache-e2a9ab56-bde3-40b6-a214-19c77a9c6778" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1865.484766] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Acquired lock "refresh_cache-e2a9ab56-bde3-40b6-a214-19c77a9c6778" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1865.484948] env[62684]: DEBUG nova.network.neutron [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1865.616014] env[62684]: DEBUG nova.compute.manager [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1865.629066] env[62684]: DEBUG nova.network.neutron [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Successfully created port: 1d1c0f31-e026-45f0-b3c8-5ba02555e863 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1865.652163] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1254983d-cf7e-4507-b977-0c428994dfb4 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] Lock "d532b5fa-90a3-4f25-8684-4eabaa432c86" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 9.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1865.682312] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20451c39-81c1-4953-974a-6993dd9af5e0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.691115] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da58f26-8cc8-46cd-8064-de30c04b830f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.705651] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052703, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.730891] env[62684]: DEBUG nova.compute.utils [None req-72150c9f-0895-4b84-806f-aa4d16d5d795 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Can not refresh info_cache because instance was not found {{(pid=62684) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}} [ 1865.733866] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e0562d-0835-4838-993d-bf0aab6a9006 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.742105] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42f7d8e0-1ea9-43bd-866c-e1f12400d2ed {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.755653] env[62684]: DEBUG nova.compute.provider_tree [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1865.757661] env[62684]: DEBUG nova.network.neutron [None req-72150c9f-0895-4b84-806f-aa4d16d5d795 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1865.847638] env[62684]: DEBUG nova.network.neutron [None req-72150c9f-0895-4b84-806f-aa4d16d5d795 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1865.889439] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052704, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.011197] env[62684]: DEBUG nova.network.neutron [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1866.072259] env[62684]: DEBUG nova.network.neutron [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1866.206818] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052703, 'name': ReconfigVM_Task, 'duration_secs': 1.040468} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.207576] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Reconfigured VM instance instance-00000027 to attach disk [datastore1] c6dc5401-f59e-4c18-9553-1240e2f49bce/c6dc5401-f59e-4c18-9553-1240e2f49bce.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1866.208316] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eeb79bb3-be78-4064-b023-8b67dec6c66f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.214487] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1866.214487] env[62684]: value = "task-2052705" [ 1866.214487] env[62684]: _type = "Task" [ 1866.214487] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.225088] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052705, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.261314] env[62684]: DEBUG nova.scheduler.client.report [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1866.353941] env[62684]: DEBUG oslo_concurrency.lockutils [None req-72150c9f-0895-4b84-806f-aa4d16d5d795 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Releasing lock "refresh_cache-3a172e9f-9f79-489e-9571-80bd74ad8609" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1866.354420] env[62684]: DEBUG nova.compute.manager [None req-72150c9f-0895-4b84-806f-aa4d16d5d795 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1866.354648] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-72150c9f-0895-4b84-806f-aa4d16d5d795 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1866.354995] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3b5c773e-3b5b-424b-b2dc-230ceb6aa585 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.363884] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faff072f-684a-4952-88b5-76bc65277e00 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.382308] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052704, 'name': CreateVM_Task, 'duration_secs': 0.714173} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.382466] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1866.383186] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1866.383362] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1866.383701] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1866.383951] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b03cb466-9d65-4ce1-8ae1-ecb89ac13bf4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.388474] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1866.388474] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52defd20-b1eb-aec9-a7bc-7d3e45e65a25" [ 1866.388474] env[62684]: _type = "Task" [ 1866.388474] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.403223] env[62684]: WARNING nova.virt.vmwareapi.vmops [None req-72150c9f-0895-4b84-806f-aa4d16d5d795 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3a172e9f-9f79-489e-9571-80bd74ad8609 could not be found. [ 1866.403559] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-72150c9f-0895-4b84-806f-aa4d16d5d795 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1866.403782] env[62684]: INFO nova.compute.manager [None req-72150c9f-0895-4b84-806f-aa4d16d5d795 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1866.405385] env[62684]: DEBUG oslo.service.loopingcall [None req-72150c9f-0895-4b84-806f-aa4d16d5d795 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1866.407755] env[62684]: DEBUG nova.compute.manager [-] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1866.407755] env[62684]: DEBUG nova.network.neutron [-] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1866.414547] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52defd20-b1eb-aec9-a7bc-7d3e45e65a25, 'name': SearchDatastore_Task, 'duration_secs': 0.008399} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.414756] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1866.416700] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1866.416700] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1866.416700] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1866.416700] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1866.416700] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1b91107-475e-4176-95bf-8e13a51a98ad {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.424762] env[62684]: DEBUG nova.network.neutron [-] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1866.428668] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1866.429349] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1866.429597] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ad71e22-c435-4478-aaa2-e124c97da966 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.436346] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1866.436346] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522a3ee9-8b21-a447-671e-6a9858160072" [ 1866.436346] env[62684]: _type = "Task" [ 1866.436346] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.441324] env[62684]: DEBUG nova.compute.manager [req-414216fc-eef2-491d-a646-cb903cc34a59 req-eec2206d-3978-4515-9c3d-c3ae4fb712ce service nova] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Received event network-changed-c4406072-51a1-483b-89d6-d1b7ed992955 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1866.441508] env[62684]: DEBUG nova.compute.manager [req-414216fc-eef2-491d-a646-cb903cc34a59 req-eec2206d-3978-4515-9c3d-c3ae4fb712ce service nova] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Refreshing instance network info cache due to event network-changed-c4406072-51a1-483b-89d6-d1b7ed992955. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1866.444019] env[62684]: DEBUG oslo_concurrency.lockutils [req-414216fc-eef2-491d-a646-cb903cc34a59 req-eec2206d-3978-4515-9c3d-c3ae4fb712ce service nova] Acquiring lock "refresh_cache-5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1866.444019] env[62684]: DEBUG oslo_concurrency.lockutils [req-414216fc-eef2-491d-a646-cb903cc34a59 req-eec2206d-3978-4515-9c3d-c3ae4fb712ce service nova] Acquired lock "refresh_cache-5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1866.444019] env[62684]: DEBUG nova.network.neutron [req-414216fc-eef2-491d-a646-cb903cc34a59 req-eec2206d-3978-4515-9c3d-c3ae4fb712ce service nova] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Refreshing network info cache for port c4406072-51a1-483b-89d6-d1b7ed992955 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1866.449016] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522a3ee9-8b21-a447-671e-6a9858160072, 'name': SearchDatastore_Task, 'duration_secs': 0.007842} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.449804] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1047e01f-409c-4540-a9c2-95e0ca5f0d46 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.455778] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1866.455778] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d1dab3-7d93-d3a2-b64f-547fc867f162" [ 1866.455778] env[62684]: _type = "Task" [ 1866.455778] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.463671] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d1dab3-7d93-d3a2-b64f-547fc867f162, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.574524] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Releasing lock "refresh_cache-e2a9ab56-bde3-40b6-a214-19c77a9c6778" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1866.574994] env[62684]: DEBUG nova.compute.manager [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1866.575216] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1866.576149] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a5314f-4597-471f-b506-61a5b1884c1c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.585449] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1866.585449] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a2ead0a8-c457-42c1-aeff-487ce95fc751 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.591760] env[62684]: DEBUG oslo_vmware.api [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Waiting for the task: (returnval){ [ 1866.591760] env[62684]: value = "task-2052706" [ 1866.591760] env[62684]: _type = "Task" [ 1866.591760] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.601216] env[62684]: DEBUG oslo_vmware.api [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052706, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.623113] env[62684]: DEBUG nova.compute.manager [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1866.649247] env[62684]: DEBUG nova.virt.hardware [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1866.649557] env[62684]: DEBUG nova.virt.hardware [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1866.649722] env[62684]: DEBUG nova.virt.hardware [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1866.649913] env[62684]: DEBUG nova.virt.hardware [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1866.650081] env[62684]: DEBUG nova.virt.hardware [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1866.650240] env[62684]: DEBUG nova.virt.hardware [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1866.650456] env[62684]: DEBUG nova.virt.hardware [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1866.650622] env[62684]: DEBUG nova.virt.hardware [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1866.650798] env[62684]: DEBUG nova.virt.hardware [None 
req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1866.650961] env[62684]: DEBUG nova.virt.hardware [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1866.652029] env[62684]: DEBUG nova.virt.hardware [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1866.652029] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac177c9-d57b-4975-a75b-438bb0dd4851 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.659923] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e7e328-eb20-483a-8df3-8087a7f33522 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.724802] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052705, 'name': Rename_Task, 'duration_secs': 0.144596} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.725080] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1866.725340] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c75df7bb-5fda-4e85-94f4-c24ab065c1a1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.736365] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1866.736365] env[62684]: value = "task-2052707" [ 1866.736365] env[62684]: _type = "Task" [ 1866.736365] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.746999] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052707, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.766793] env[62684]: DEBUG oslo_concurrency.lockutils [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.158s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1866.769443] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.305s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1866.769540] env[62684]: DEBUG nova.objects.instance [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lazy-loading 'resources' on Instance uuid 52839b18-a68a-4ec7-a921-c42454955e82 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1866.797281] env[62684]: INFO nova.scheduler.client.report [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Deleted allocations for instance 6d4061e4-a074-445d-95c5-239014ee87f3 [ 1866.927925] env[62684]: DEBUG nova.network.neutron [-] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1866.966456] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d1dab3-7d93-d3a2-b64f-547fc867f162, 'name': SearchDatastore_Task, 'duration_secs': 0.008094} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.966748] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1866.967030] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7/5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1866.967305] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-59856699-277d-4f0d-bf28-370b241aafda {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.974205] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1866.974205] env[62684]: value = "task-2052708" [ 1866.974205] env[62684]: _type = "Task" [ 1866.974205] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.982864] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052708, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.103596] env[62684]: DEBUG oslo_vmware.api [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052706, 'name': PowerOffVM_Task, 'duration_secs': 0.236823} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.106643] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1867.106862] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1867.107149] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3cee45fe-61ef-4d51-b031-3bc83cccb415 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.134939] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1867.135183] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1867.135367] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Deleting the datastore file [datastore1] e2a9ab56-bde3-40b6-a214-19c77a9c6778 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1867.135724] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d45da33-7b9a-4395-862c-0a0b2f4988ee {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.142015] env[62684]: DEBUG oslo_vmware.api [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Waiting for the task: (returnval){ [ 1867.142015] env[62684]: value = "task-2052710" [ 1867.142015] env[62684]: _type = "Task" [ 1867.142015] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.150320] env[62684]: DEBUG oslo_vmware.api [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052710, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.186424] env[62684]: DEBUG nova.network.neutron [req-414216fc-eef2-491d-a646-cb903cc34a59 req-eec2206d-3978-4515-9c3d-c3ae4fb712ce service nova] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Updated VIF entry in instance network info cache for port c4406072-51a1-483b-89d6-d1b7ed992955. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1867.186952] env[62684]: DEBUG nova.network.neutron [req-414216fc-eef2-491d-a646-cb903cc34a59 req-eec2206d-3978-4515-9c3d-c3ae4fb712ce service nova] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Updating instance_info_cache with network_info: [{"id": "c4406072-51a1-483b-89d6-d1b7ed992955", "address": "fa:16:3e:89:98:2b", "network": {"id": "67d45e5a-c931-48e0-8be6-ad19f860ff6f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1665326946-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "843e3293347643789e54644c035332dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4406072-51", "ovs_interfaceid": "c4406072-51a1-483b-89d6-d1b7ed992955", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1867.231931] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Acquiring lock "6b1f0e69-3915-40dc-b4ec-93ab174f12b6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1867.232344] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Lock "6b1f0e69-3915-40dc-b4ec-93ab174f12b6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1867.232584] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Acquiring lock "6b1f0e69-3915-40dc-b4ec-93ab174f12b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1867.232786] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Lock "6b1f0e69-3915-40dc-b4ec-93ab174f12b6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1867.233027] env[62684]: DEBUG 
oslo_concurrency.lockutils [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Lock "6b1f0e69-3915-40dc-b4ec-93ab174f12b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1867.235253] env[62684]: INFO nova.compute.manager [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Terminating instance [ 1867.240879] env[62684]: DEBUG nova.compute.manager [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1867.240879] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1867.241396] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f64e376-dedd-4ad4-8d84-8dcb7fd731e2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.249677] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052707, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.251603] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1867.251827] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dd23a6a1-12c8-4a16-b496-bec4717bf5d2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.257705] env[62684]: DEBUG oslo_vmware.api [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Waiting for the task: (returnval){ [ 1867.257705] env[62684]: value = "task-2052711" [ 1867.257705] env[62684]: _type = "Task" [ 1867.257705] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.262331] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7b66fac4-85a5-418e-aa2f-b2bcbc5c44b2 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] Acquiring lock "d532b5fa-90a3-4f25-8684-4eabaa432c86" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1867.262600] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7b66fac4-85a5-418e-aa2f-b2bcbc5c44b2 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] Lock "d532b5fa-90a3-4f25-8684-4eabaa432c86" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1867.267357] env[62684]: DEBUG oslo_vmware.api [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052711, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.304632] env[62684]: DEBUG oslo_concurrency.lockutils [None req-35ad379d-d778-477c-a6f4-0f55a64069c4 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "6d4061e4-a074-445d-95c5-239014ee87f3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.879s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1867.305656] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "6d4061e4-a074-445d-95c5-239014ee87f3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 11.954s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1867.306166] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-99e1c662-3bd5-4c54-9e54-1d0da225a9e1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.316116] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78af03c7-1de0-479d-a231-9cdcd6c92805 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.430277] env[62684]: INFO nova.compute.manager [-] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Took 1.02 seconds to deallocate network for instance. [ 1867.484981] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052708, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.654194] env[62684]: DEBUG oslo_vmware.api [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Task: {'id': task-2052710, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161792} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.657220] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1867.657470] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1867.657671] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1867.657899] env[62684]: INFO nova.compute.manager [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1867.658202] env[62684]: DEBUG oslo.service.loopingcall [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1867.658652] env[62684]: DEBUG nova.compute.manager [-] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1867.658781] env[62684]: DEBUG nova.network.neutron [-] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1867.677401] env[62684]: DEBUG nova.network.neutron [-] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1867.686898] env[62684]: DEBUG nova.network.neutron [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Successfully updated port: 1d1c0f31-e026-45f0-b3c8-5ba02555e863 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1867.692231] env[62684]: DEBUG oslo_concurrency.lockutils [req-414216fc-eef2-491d-a646-cb903cc34a59 req-eec2206d-3978-4515-9c3d-c3ae4fb712ce service nova] Releasing lock "refresh_cache-5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1867.747882] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052707, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.769226] env[62684]: INFO nova.compute.manager [None req-7b66fac4-85a5-418e-aa2f-b2bcbc5c44b2 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Detaching volume 42560db2-1a6f-4d32-878e-bb36627ec4d0 [ 1867.771061] env[62684]: DEBUG oslo_vmware.api [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052711, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.786165] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5411fdb1-9601-4a36-ac5e-74e2df85d170 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.793892] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e4234d4-9136-4625-bf8e-ea6cbbf206e6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.825393] env[62684]: INFO nova.virt.block_device [None req-7b66fac4-85a5-418e-aa2f-b2bcbc5c44b2 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Attempting to driver detach volume 42560db2-1a6f-4d32-878e-bb36627ec4d0 from mountpoint /dev/sdb [ 1867.825659] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b66fac4-85a5-418e-aa2f-b2bcbc5c44b2 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Volume detach. 
Driver type: vmdk {{(pid=62684) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1867.825862] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b66fac4-85a5-418e-aa2f-b2bcbc5c44b2 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421237', 'volume_id': '42560db2-1a6f-4d32-878e-bb36627ec4d0', 'name': 'volume-42560db2-1a6f-4d32-878e-bb36627ec4d0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd532b5fa-90a3-4f25-8684-4eabaa432c86', 'attached_at': '', 'detached_at': '', 'volume_id': '42560db2-1a6f-4d32-878e-bb36627ec4d0', 'serial': '42560db2-1a6f-4d32-878e-bb36627ec4d0'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1867.826668] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-772a26c3-cb6d-4d55-b813-83a6d4fe36d0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.829720] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ee727f-8168-488c-927e-8d3d5a2554c2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.852156] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ca03db3-88cd-4fe7-8222-d237252fd135 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.856686] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "6d4061e4-a074-445d-95c5-239014ee87f3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.551s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1867.857512] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81d62fe9-d1f3-4993-bfde-ba506e6d296e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.870857] env[62684]: DEBUG nova.compute.provider_tree [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1867.874340] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bbc1b67-f806-47f2-9ad4-249c1e4fe81f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.897668] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6f0ebc4-3125-499d-be80-96e2333382e7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.913342] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b66fac4-85a5-418e-aa2f-b2bcbc5c44b2 
tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] The volume has not been displaced from its original location: [datastore1] volume-42560db2-1a6f-4d32-878e-bb36627ec4d0/volume-42560db2-1a6f-4d32-878e-bb36627ec4d0.vmdk. No consolidation needed. {{(pid=62684) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1867.918502] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b66fac4-85a5-418e-aa2f-b2bcbc5c44b2 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Reconfiguring VM instance instance-0000000c to detach disk 2001 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1867.919059] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3401869-238f-4cb4-a845-a60a0f520b23 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.937854] env[62684]: INFO nova.compute.manager [None req-72150c9f-0895-4b84-806f-aa4d16d5d795 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Instance disappeared during terminate [ 1867.938056] env[62684]: DEBUG oslo_concurrency.lockutils [None req-72150c9f-0895-4b84-806f-aa4d16d5d795 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Lock "3a172e9f-9f79-489e-9571-80bd74ad8609" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.747s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1867.939240] env[62684]: DEBUG oslo_vmware.api [None req-7b66fac4-85a5-418e-aa2f-b2bcbc5c44b2 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] Waiting for the task: (returnval){ [ 1867.939240] env[62684]: value = "task-2052712" [ 1867.939240] env[62684]: _type = "Task" [ 1867.939240] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.939815] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "3a172e9f-9f79-489e-9571-80bd74ad8609" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 12.588s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1867.939815] env[62684]: INFO nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1867.939894] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "3a172e9f-9f79-489e-9571-80bd74ad8609" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1867.949422] env[62684]: DEBUG oslo_vmware.api [None req-7b66fac4-85a5-418e-aa2f-b2bcbc5c44b2 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] Task: {'id': task-2052712, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.985540] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052708, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.180633] env[62684]: DEBUG nova.network.neutron [-] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1868.194085] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "refresh_cache-025dfe36-1f14-4bda-84a0-d424364b745b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1868.194248] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquired lock "refresh_cache-025dfe36-1f14-4bda-84a0-d424364b745b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1868.194401] env[62684]: DEBUG nova.network.neutron [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1868.248272] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052707, 'name': PowerOnVM_Task, 'duration_secs': 1.040803} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.248557] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1868.248772] env[62684]: INFO nova.compute.manager [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Took 10.45 seconds to spawn the instance on the hypervisor. [ 1868.248959] env[62684]: DEBUG nova.compute.manager [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1868.249744] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22285f51-b993-4610-8d2b-75bb8863b2c3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.268451] env[62684]: DEBUG oslo_vmware.api [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052711, 'name': PowerOffVM_Task, 'duration_secs': 0.610099} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.268695] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1868.268869] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1868.269124] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e868473c-88e7-43cb-b3c0-c038637f63ef {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.378767] env[62684]: DEBUG nova.scheduler.client.report [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1868.399505] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1868.400126] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1868.400438] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Deleting the datastore file [datastore2] 6b1f0e69-3915-40dc-b4ec-93ab174f12b6 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1868.401090] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a4ed4625-caf2-46ad-94b9-176008bfa29f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.409466] env[62684]: DEBUG oslo_vmware.api [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Waiting for the task: (returnval){ [ 1868.409466] env[62684]: value = "task-2052714" [ 1868.409466] env[62684]: _type = "Task" [ 1868.409466] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.427438] env[62684]: DEBUG oslo_vmware.api [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052714, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.449318] env[62684]: DEBUG oslo_vmware.api [None req-7b66fac4-85a5-418e-aa2f-b2bcbc5c44b2 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] Task: {'id': task-2052712, 'name': ReconfigVM_Task, 'duration_secs': 0.242781} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.449709] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b66fac4-85a5-418e-aa2f-b2bcbc5c44b2 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Reconfigured VM instance instance-0000000c to detach disk 2001 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1868.454538] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e114148c-910b-4e88-90f6-cfec1e8136f8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.471034] env[62684]: DEBUG oslo_vmware.api [None req-7b66fac4-85a5-418e-aa2f-b2bcbc5c44b2 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] Waiting for the task: (returnval){ [ 1868.471034] env[62684]: value = "task-2052715" [ 1868.471034] env[62684]: _type = "Task" [ 1868.471034] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.473257] env[62684]: DEBUG nova.compute.manager [req-8f6bd026-f817-407a-952e-4ee2eeb77782 req-9e64bb01-7201-451c-826a-1bdbe42851cd service nova] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Received event network-vif-plugged-1d1c0f31-e026-45f0-b3c8-5ba02555e863 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1868.473488] env[62684]: DEBUG oslo_concurrency.lockutils [req-8f6bd026-f817-407a-952e-4ee2eeb77782 req-9e64bb01-7201-451c-826a-1bdbe42851cd service nova] Acquiring lock "025dfe36-1f14-4bda-84a0-d424364b745b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1868.473740] env[62684]: DEBUG oslo_concurrency.lockutils [req-8f6bd026-f817-407a-952e-4ee2eeb77782 req-9e64bb01-7201-451c-826a-1bdbe42851cd service nova] Lock "025dfe36-1f14-4bda-84a0-d424364b745b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1868.473925] env[62684]: DEBUG oslo_concurrency.lockutils [req-8f6bd026-f817-407a-952e-4ee2eeb77782 req-9e64bb01-7201-451c-826a-1bdbe42851cd service nova] Lock "025dfe36-1f14-4bda-84a0-d424364b745b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.474126] env[62684]: DEBUG nova.compute.manager [req-8f6bd026-f817-407a-952e-4ee2eeb77782 req-9e64bb01-7201-451c-826a-1bdbe42851cd service nova] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] No waiting events found dispatching network-vif-plugged-1d1c0f31-e026-45f0-b3c8-5ba02555e863 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1868.474306] env[62684]: WARNING nova.compute.manager [req-8f6bd026-f817-407a-952e-4ee2eeb77782 req-9e64bb01-7201-451c-826a-1bdbe42851cd service nova] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Received unexpected event network-vif-plugged-1d1c0f31-e026-45f0-b3c8-5ba02555e863 for instance with vm_state building and task_state 
spawning. [ 1868.474471] env[62684]: DEBUG nova.compute.manager [req-8f6bd026-f817-407a-952e-4ee2eeb77782 req-9e64bb01-7201-451c-826a-1bdbe42851cd service nova] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Received event network-changed-1d1c0f31-e026-45f0-b3c8-5ba02555e863 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1868.474631] env[62684]: DEBUG nova.compute.manager [req-8f6bd026-f817-407a-952e-4ee2eeb77782 req-9e64bb01-7201-451c-826a-1bdbe42851cd service nova] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Refreshing instance network info cache due to event network-changed-1d1c0f31-e026-45f0-b3c8-5ba02555e863. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1868.474805] env[62684]: DEBUG oslo_concurrency.lockutils [req-8f6bd026-f817-407a-952e-4ee2eeb77782 req-9e64bb01-7201-451c-826a-1bdbe42851cd service nova] Acquiring lock "refresh_cache-025dfe36-1f14-4bda-84a0-d424364b745b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1868.487594] env[62684]: DEBUG oslo_vmware.api [None req-7b66fac4-85a5-418e-aa2f-b2bcbc5c44b2 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] Task: {'id': task-2052715, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.490984] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052708, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.684589] env[62684]: INFO nova.compute.manager [-] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Took 1.03 seconds to deallocate network for instance. [ 1868.748568] env[62684]: DEBUG nova.network.neutron [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1868.768282] env[62684]: INFO nova.compute.manager [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Took 43.89 seconds to build instance. 
[ 1868.884855] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.116s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.890704] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.018s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1868.892281] env[62684]: INFO nova.compute.claims [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1868.916445] env[62684]: INFO nova.scheduler.client.report [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Deleted allocations for instance 52839b18-a68a-4ec7-a921-c42454955e82 [ 1868.926724] env[62684]: DEBUG oslo_vmware.api [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052714, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.987954] env[62684]: DEBUG oslo_vmware.api [None req-7b66fac4-85a5-418e-aa2f-b2bcbc5c44b2 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] Task: {'id': task-2052715, 'name': ReconfigVM_Task, 'duration_secs': 0.230726} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.989058] env[62684]: DEBUG nova.network.neutron [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Updating instance_info_cache with network_info: [{"id": "1d1c0f31-e026-45f0-b3c8-5ba02555e863", "address": "fa:16:3e:42:6e:d1", "network": {"id": "bf53c8de-5f43-4a15-9911-25340615a63b", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1946277195-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "540d70f4b6274c38a5e79c00e389d8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6db039c-542c-4544-a57d-ddcc6c1e8e45", "external-id": "nsx-vlan-transportzone-810", "segmentation_id": 810, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d1c0f31-e0", "ovs_interfaceid": "1d1c0f31-e026-45f0-b3c8-5ba02555e863", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1868.991034] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b66fac4-85a5-418e-aa2f-b2bcbc5c44b2 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421237', 'volume_id': '42560db2-1a6f-4d32-878e-bb36627ec4d0', 'name': 'volume-42560db2-1a6f-4d32-878e-bb36627ec4d0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd532b5fa-90a3-4f25-8684-4eabaa432c86', 'attached_at': '', 'detached_at': '', 'volume_id': '42560db2-1a6f-4d32-878e-bb36627ec4d0', 'serial': '42560db2-1a6f-4d32-878e-bb36627ec4d0'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1868.997055] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052708, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.73483} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.997724] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7/5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1868.997973] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1868.998258] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-03d3fbd8-cb67-4654-947f-f1e4e00c57b6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.006684] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1869.006684] env[62684]: value = "task-2052716" [ 1869.006684] env[62684]: _type = "Task" [ 1869.006684] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.015294] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052716, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.192977] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1869.271464] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "c6dc5401-f59e-4c18-9553-1240e2f49bce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.244s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1869.273085] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "c6dc5401-f59e-4c18-9553-1240e2f49bce" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 13.919s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.273841] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b77e3224-fdba-40cb-a992-c52d2523908e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.419538] env[62684]: DEBUG oslo_vmware.api [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Task: {'id': task-2052714, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.559163} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.419877] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1869.420022] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1869.420586] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1869.420746] env[62684]: INFO nova.compute.manager [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Took 2.18 seconds to destroy the instance on the hypervisor. 
[ 1869.421104] env[62684]: DEBUG oslo.service.loopingcall [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1869.421293] env[62684]: DEBUG nova.compute.manager [-] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1869.421389] env[62684]: DEBUG nova.network.neutron [-] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1869.433096] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5fe54878-d0fb-4cd4-a5d4-03c4c1836334 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "52839b18-a68a-4ec7-a921-c42454955e82" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.083s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1869.433096] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "52839b18-a68a-4ec7-a921-c42454955e82" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 14.082s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.433323] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-46511620-235e-469d-9545-490837f1af14 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.446663] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5985bae-fca3-4f76-b585-5434973c1795 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.499362] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Releasing lock "refresh_cache-025dfe36-1f14-4bda-84a0-d424364b745b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1869.499362] env[62684]: DEBUG nova.compute.manager [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Instance network_info: |[{"id": "1d1c0f31-e026-45f0-b3c8-5ba02555e863", "address": "fa:16:3e:42:6e:d1", "network": {"id": "bf53c8de-5f43-4a15-9911-25340615a63b", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1946277195-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "540d70f4b6274c38a5e79c00e389d8fe", 
"mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6db039c-542c-4544-a57d-ddcc6c1e8e45", "external-id": "nsx-vlan-transportzone-810", "segmentation_id": 810, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d1c0f31-e0", "ovs_interfaceid": "1d1c0f31-e026-45f0-b3c8-5ba02555e863", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1869.499498] env[62684]: DEBUG oslo_concurrency.lockutils [req-8f6bd026-f817-407a-952e-4ee2eeb77782 req-9e64bb01-7201-451c-826a-1bdbe42851cd service nova] Acquired lock "refresh_cache-025dfe36-1f14-4bda-84a0-d424364b745b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1869.499498] env[62684]: DEBUG nova.network.neutron [req-8f6bd026-f817-407a-952e-4ee2eeb77782 req-9e64bb01-7201-451c-826a-1bdbe42851cd service nova] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Refreshing network info cache for port 1d1c0f31-e026-45f0-b3c8-5ba02555e863 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1869.500629] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:6e:d1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e6db039c-542c-4544-a57d-ddcc6c1e8e45', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1d1c0f31-e026-45f0-b3c8-5ba02555e863', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1869.507933] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Creating folder: Project (540d70f4b6274c38a5e79c00e389d8fe). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1869.509387] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-80881abd-b593-4fcf-9452-31c26b8a3ae5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.520279] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052716, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065685} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.520664] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1869.521373] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c2320cc-f30c-41d3-b56e-c22c61fa96a4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.525391] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Created folder: Project (540d70f4b6274c38a5e79c00e389d8fe) in parent group-v421118. [ 1869.525593] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Creating folder: Instances. Parent ref: group-v421241. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1869.526197] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b4abb693-b67c-4611-98d4-ab594b5cd404 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.545594] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7/5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1869.546329] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dc63266a-d772-4ef2-a248-298a87a0d50d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.562844] env[62684]: DEBUG nova.objects.instance [None req-7b66fac4-85a5-418e-aa2f-b2bcbc5c44b2 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] Lazy-loading 'flavor' on Instance uuid d532b5fa-90a3-4f25-8684-4eabaa432c86 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1869.564501] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Created folder: Instances in parent group-v421241. [ 1869.564767] env[62684]: DEBUG oslo.service.loopingcall [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1869.565588] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1869.565816] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9b3175b6-a0a7-40f5-80bd-5f178f849d1b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.582998] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1869.582998] env[62684]: value = "task-2052719" [ 1869.582998] env[62684]: _type = "Task" [ 1869.582998] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.588507] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1869.588507] env[62684]: value = "task-2052720" [ 1869.588507] env[62684]: _type = "Task" [ 1869.588507] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.591995] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052719, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.600260] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052720, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.777470] env[62684]: DEBUG nova.compute.manager [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1869.783793] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "c6dc5401-f59e-4c18-9553-1240e2f49bce" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.511s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1869.983366] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "52839b18-a68a-4ec7-a921-c42454955e82" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.550s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.098229] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052719, 'name': ReconfigVM_Task, 'duration_secs': 0.262357} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.102018] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Reconfigured VM instance instance-00000028 to attach disk [datastore2] 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7/5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1870.103308] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-458458a8-2e91-4415-893a-22d315e1b361 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.108818] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052720, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.114230] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1870.114230] env[62684]: value = "task-2052721" [ 1870.114230] env[62684]: _type = "Task" [ 1870.114230] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.126801] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052721, 'name': Rename_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.172251] env[62684]: DEBUG nova.network.neutron [-] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1870.258659] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Acquiring lock "73f27fc0-ebae-41c7-b292-14396f79a5a2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.258974] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Lock "73f27fc0-ebae-41c7-b292-14396f79a5a2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.259250] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Acquiring lock "73f27fc0-ebae-41c7-b292-14396f79a5a2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.259479] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Lock "73f27fc0-ebae-41c7-b292-14396f79a5a2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.259703] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Lock "73f27fc0-ebae-41c7-b292-14396f79a5a2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.262391] env[62684]: INFO nova.compute.manager [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Terminating instance [ 1870.266725] env[62684]: DEBUG nova.compute.manager [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1870.266725] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1870.266725] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a367eba4-6e2f-4153-ad23-a204fb7f17e8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.274473] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1870.277044] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-816ce818-d264-4ca4-991c-60a46dfb6611 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.284796] env[62684]: DEBUG oslo_vmware.api [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Waiting for the task: (returnval){ [ 1870.284796] env[62684]: value = "task-2052722" [ 1870.284796] env[62684]: _type = "Task" [ 1870.284796] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.300651] env[62684]: DEBUG oslo_vmware.api [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052722, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.303400] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.446023] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-857a5b95-78bf-4eab-b85d-602050714737 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.455823] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5983a71a-4d99-4fe3-b400-84ea9ddd9521 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.495009] env[62684]: DEBUG nova.network.neutron [req-8f6bd026-f817-407a-952e-4ee2eeb77782 req-9e64bb01-7201-451c-826a-1bdbe42851cd service nova] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Updated VIF entry in instance network info cache for port 1d1c0f31-e026-45f0-b3c8-5ba02555e863. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1870.495395] env[62684]: DEBUG nova.network.neutron [req-8f6bd026-f817-407a-952e-4ee2eeb77782 req-9e64bb01-7201-451c-826a-1bdbe42851cd service nova] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Updating instance_info_cache with network_info: [{"id": "1d1c0f31-e026-45f0-b3c8-5ba02555e863", "address": "fa:16:3e:42:6e:d1", "network": {"id": "bf53c8de-5f43-4a15-9911-25340615a63b", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1946277195-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "540d70f4b6274c38a5e79c00e389d8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6db039c-542c-4544-a57d-ddcc6c1e8e45", "external-id": "nsx-vlan-transportzone-810", "segmentation_id": 810, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d1c0f31-e0", "ovs_interfaceid": "1d1c0f31-e026-45f0-b3c8-5ba02555e863", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1870.497245] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a807a512-e69d-4b06-b265-63ec57904de6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.505855] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53586495-ede2-48ee-bb33-2bcb300cafb8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.520390] env[62684]: DEBUG nova.compute.provider_tree [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1870.576643] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7b66fac4-85a5-418e-aa2f-b2bcbc5c44b2 tempest-VolumesAssistedSnapshotsTest-510206985 tempest-VolumesAssistedSnapshotsTest-510206985-project-admin] Lock "d532b5fa-90a3-4f25-8684-4eabaa432c86" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.314s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.606653] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052720, 'name': CreateVM_Task, 'duration_secs': 0.578647} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.607231] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1870.608048] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1870.608251] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1870.608644] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1870.608920] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfe5197e-7a38-47be-ab66-dfb94ede5315 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.618738] env[62684]: DEBUG oslo_vmware.api [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 1870.618738] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5299d79c-6a50-c991-451f-b5a589d0ea78" [ 1870.618738] env[62684]: _type = "Task" [ 1870.618738] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.633597] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052721, 'name': Rename_Task, 'duration_secs': 0.199738} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.637525] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1870.637920] env[62684]: DEBUG oslo_vmware.api [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5299d79c-6a50-c991-451f-b5a589d0ea78, 'name': SearchDatastore_Task, 'duration_secs': 0.015243} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.638149] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2c973381-a404-4ca1-9589-20013bcc7196 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.639843] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1870.640126] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1870.640447] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1870.640607] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1870.640824] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1870.641440] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8bee8c9f-6409-440c-a9e1-e25b78fc773a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.647348] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1870.647348] env[62684]: value = "task-2052723" [ 1870.647348] env[62684]: _type = "Task" [ 1870.647348] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.652184] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1870.652184] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1870.652961] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a3dfe88-eec4-4429-a07e-5274a0b0cd9e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.659944] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052723, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.663452] env[62684]: DEBUG oslo_vmware.api [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 1870.663452] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520604af-8f6a-883d-54cf-13de82ebdd4e" [ 1870.663452] env[62684]: _type = "Task" [ 1870.663452] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.675182] env[62684]: DEBUG oslo_vmware.api [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520604af-8f6a-883d-54cf-13de82ebdd4e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.676784] env[62684]: INFO nova.compute.manager [-] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Took 1.26 seconds to deallocate network for instance. [ 1870.797387] env[62684]: DEBUG oslo_vmware.api [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052722, 'name': PowerOffVM_Task, 'duration_secs': 0.38032} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.797767] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1870.797844] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1870.798119] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-627e7c78-5362-40df-8626-9063688ffa31 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.919827] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1870.920112] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1870.920299] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Deleting the datastore file [datastore1] 73f27fc0-ebae-41c7-b292-14396f79a5a2 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1870.920591] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c328cb98-8c42-4ca3-bc87-abcc7ef32030 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.927712] env[62684]: DEBUG oslo_vmware.api [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Waiting for the task: (returnval){ [ 1870.927712] env[62684]: value = "task-2052726" [ 1870.927712] env[62684]: _type = "Task" [ 1870.927712] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.938744] env[62684]: DEBUG oslo_vmware.api [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052726, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.986984] env[62684]: DEBUG nova.compute.manager [req-9e2ce48c-f8d3-43d4-9b74-57a50f1b018d req-4d0602b8-fd0b-4956-b361-4ac50aaf5eca service nova] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Received event network-vif-deleted-8f6b3e69-1998-4808-9c1a-1224c8ab5363 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1871.002665] env[62684]: DEBUG oslo_concurrency.lockutils [req-8f6bd026-f817-407a-952e-4ee2eeb77782 req-9e64bb01-7201-451c-826a-1bdbe42851cd service nova] Releasing lock "refresh_cache-025dfe36-1f14-4bda-84a0-d424364b745b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1871.024146] env[62684]: DEBUG nova.scheduler.client.report [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1871.160981] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Acquiring lock "feca8680-4baa-4b2c-9875-69a88b351dc0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.161254] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Lock "feca8680-4baa-4b2c-9875-69a88b351dc0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.161452] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052723, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.174356] env[62684]: DEBUG oslo_vmware.api [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520604af-8f6a-883d-54cf-13de82ebdd4e, 'name': SearchDatastore_Task, 'duration_secs': 0.016233} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.175184] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa11efcf-9221-4302-9ed8-db06ce9497ba {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.181427] env[62684]: DEBUG oslo_vmware.api [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 1871.181427] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52cd0c6a-186c-c576-dd26-3ceb1c2c4ec6" [ 1871.181427] env[62684]: _type = "Task" [ 1871.181427] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.186570] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.192741] env[62684]: DEBUG oslo_vmware.api [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52cd0c6a-186c-c576-dd26-3ceb1c2c4ec6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.277395] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "b009f710-1a94-4113-8feb-7cc5dd6a6519" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.277395] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "b009f710-1a94-4113-8feb-7cc5dd6a6519" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.302357] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "df93c57e-716c-4c73-b551-9079a523ea0b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.302357] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "df93c57e-716c-4c73-b551-9079a523ea0b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s 
{{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.437533] env[62684]: DEBUG oslo_vmware.api [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052726, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166167} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.439030] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1871.439030] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1871.439030] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1871.439030] env[62684]: INFO nova.compute.manager [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1871.439030] env[62684]: DEBUG oslo.service.loopingcall [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1871.439279] env[62684]: DEBUG nova.compute.manager [-] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1871.439279] env[62684]: DEBUG nova.network.neutron [-] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1871.530959] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.640s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1871.531703] env[62684]: DEBUG nova.compute.manager [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1871.535299] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.482s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.537805] env[62684]: INFO nova.compute.claims [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1871.661500] env[62684]: DEBUG oslo_vmware.api [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052723, 'name': PowerOnVM_Task, 'duration_secs': 0.598581} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.661500] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1871.661701] env[62684]: INFO nova.compute.manager [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Took 8.40 seconds to spawn the instance on the hypervisor. 
[ 1871.661882] env[62684]: DEBUG nova.compute.manager [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1871.662661] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70205865-5243-45d7-8cda-9256ad64c706 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.690564] env[62684]: DEBUG oslo_vmware.api [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52cd0c6a-186c-c576-dd26-3ceb1c2c4ec6, 'name': SearchDatastore_Task, 'duration_secs': 0.012886} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.690784] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1871.691064] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 025dfe36-1f14-4bda-84a0-d424364b745b/025dfe36-1f14-4bda-84a0-d424364b745b.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1871.691337] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8120b2bb-7e67-48d0-aaa7-8f72429e332e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.698401] env[62684]: DEBUG oslo_vmware.api [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 1871.698401] env[62684]: value = "task-2052727" [ 1871.698401] env[62684]: _type = "Task" [ 1871.698401] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.707073] env[62684]: DEBUG oslo_vmware.api [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2052727, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.045754] env[62684]: DEBUG nova.compute.utils [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1872.051433] env[62684]: DEBUG nova.compute.manager [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1872.052308] env[62684]: DEBUG nova.network.neutron [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1872.185297] env[62684]: INFO nova.compute.manager [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Took 46.75 seconds to build instance. [ 1872.210706] env[62684]: DEBUG oslo_vmware.api [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2052727, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.351527] env[62684]: DEBUG nova.policy [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd6852cd96cc47b9a5f02ec1a9c8c92f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'efeb7b9b19c540c9a65cb4beed66d9bc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1872.507470] env[62684]: DEBUG nova.network.neutron [-] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1872.562018] env[62684]: DEBUG nova.compute.manager [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1872.688156] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a20f5b8-72fb-4b61-8c74-81830f2dd79a tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.592s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1872.711696] env[62684]: DEBUG oslo_vmware.api [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2052727, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.82822} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.711965] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 025dfe36-1f14-4bda-84a0-d424364b745b/025dfe36-1f14-4bda-84a0-d424364b745b.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1872.712205] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1872.712463] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d4202535-d761-4c83-9279-3a3acfa80775 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.721534] env[62684]: DEBUG oslo_vmware.api [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 1872.721534] env[62684]: value = "task-2052728" [ 1872.721534] env[62684]: _type = "Task" [ 1872.721534] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.730412] env[62684]: DEBUG oslo_vmware.api [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2052728, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.817279] env[62684]: DEBUG nova.network.neutron [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Successfully created port: 8da4cae5-6ee0-4160-8056-921fb0de7b4f {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1873.010447] env[62684]: INFO nova.compute.manager [-] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Took 1.57 seconds to deallocate network for instance. 
[ 1873.080193] env[62684]: DEBUG nova.compute.manager [req-7357d528-ff7c-4f30-b3df-21ca346351e6 req-24b7300c-e87d-45b0-b58a-db2a4c63edeb service nova] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Received event network-vif-deleted-30471946-98e4-4413-acb5-8a8190d1dd82 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1873.101379] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c96a57ae-c051-452d-be56-542c1bf861d3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.108850] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11034ebe-17c5-4174-91ac-f8539eacfc58 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.142299] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca22a785-d0ad-4465-93ef-9ac73537776d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.150118] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68474b18-f733-4032-9080-6da641c12863 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.164755] env[62684]: DEBUG nova.compute.provider_tree [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1873.190497] env[62684]: DEBUG nova.compute.manager [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1873.232693] env[62684]: DEBUG oslo_vmware.api [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2052728, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.116938} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.232934] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1873.233786] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc45691-2792-491f-9e19-be4d8010b27e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.255996] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] 025dfe36-1f14-4bda-84a0-d424364b745b/025dfe36-1f14-4bda-84a0-d424364b745b.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1873.256300] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2bbf19e8-a24d-4917-b0cd-1c5c3e0e3107 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.281560] env[62684]: DEBUG oslo_vmware.api [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 1873.281560] env[62684]: value = "task-2052729" [ 1873.281560] env[62684]: _type = "Task" [ 1873.281560] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.290223] env[62684]: DEBUG oslo_vmware.api [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2052729, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.492837] env[62684]: DEBUG oslo_concurrency.lockutils [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Acquiring lock "d532b5fa-90a3-4f25-8684-4eabaa432c86" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1873.493149] env[62684]: DEBUG oslo_concurrency.lockutils [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Lock "d532b5fa-90a3-4f25-8684-4eabaa432c86" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.493368] env[62684]: DEBUG oslo_concurrency.lockutils [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Acquiring lock "d532b5fa-90a3-4f25-8684-4eabaa432c86-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1873.493565] env[62684]: DEBUG oslo_concurrency.lockutils [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Lock "d532b5fa-90a3-4f25-8684-4eabaa432c86-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.493780] env[62684]: DEBUG oslo_concurrency.lockutils [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Lock "d532b5fa-90a3-4f25-8684-4eabaa432c86-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1873.496270] env[62684]: INFO nova.compute.manager [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Terminating instance [ 1873.498504] env[62684]: DEBUG nova.compute.manager [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1873.498704] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1873.499545] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f00bfcc-b5b0-4f45-bbfb-623b6dad067a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.506998] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1873.507250] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5e448c7d-2f2c-4688-b4c3-5a7bb3f7b087 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.513345] env[62684]: DEBUG oslo_vmware.api [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Waiting for the task: (returnval){ [ 1873.513345] env[62684]: value = "task-2052730" [ 1873.513345] env[62684]: _type = "Task" [ 1873.513345] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.518719] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1873.521965] env[62684]: DEBUG oslo_vmware.api [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Task: {'id': task-2052730, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.564793] env[62684]: DEBUG oslo_concurrency.lockutils [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquiring lock "cfe219da-adf9-44b9-9df3-752ccf72a68b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1873.565124] env[62684]: DEBUG oslo_concurrency.lockutils [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "cfe219da-adf9-44b9-9df3-752ccf72a68b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.565402] env[62684]: DEBUG oslo_concurrency.lockutils [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquiring lock "cfe219da-adf9-44b9-9df3-752ccf72a68b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1873.565700] env[62684]: DEBUG oslo_concurrency.lockutils [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "cfe219da-adf9-44b9-9df3-752ccf72a68b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.565925] env[62684]: DEBUG oslo_concurrency.lockutils [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "cfe219da-adf9-44b9-9df3-752ccf72a68b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1873.568321] env[62684]: INFO nova.compute.manager [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Terminating instance [ 1873.572775] env[62684]: DEBUG nova.compute.manager [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1873.577178] env[62684]: DEBUG nova.compute.manager [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1873.577440] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1873.578428] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-593650a8-806e-48c1-ae30-50e8281fcc9f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.590617] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1873.591531] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d60b5e1e-519f-4a7a-9125-a900abe78eae {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.600693] env[62684]: DEBUG oslo_vmware.api [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1873.600693] env[62684]: value = "task-2052731" [ 1873.600693] env[62684]: _type = "Task" [ 1873.600693] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.607238] env[62684]: DEBUG nova.virt.hardware [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1873.607519] env[62684]: DEBUG nova.virt.hardware [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1873.607697] env[62684]: DEBUG nova.virt.hardware [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 1873.607921] env[62684]: DEBUG nova.virt.hardware [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1873.608194] env[62684]: DEBUG nova.virt.hardware [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1873.608394] env[62684]: DEBUG nova.virt.hardware [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1873.608630] env[62684]: DEBUG nova.virt.hardware [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1873.608802] env[62684]: DEBUG nova.virt.hardware [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1873.608977] env[62684]: DEBUG nova.virt.hardware [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1873.609165] env[62684]: DEBUG nova.virt.hardware [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1873.609347] env[62684]: DEBUG nova.virt.hardware [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1873.610163] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a62929-c350-4e5f-9c50-578a0fa03616 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.617164] env[62684]: DEBUG oslo_vmware.api [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052731, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.622339] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a57a3bbe-1c9a-4363-acdd-11eb376f6a7d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.668992] env[62684]: DEBUG nova.scheduler.client.report [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1873.710244] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1873.793837] env[62684]: DEBUG oslo_vmware.api [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2052729, 'name': ReconfigVM_Task, 'duration_secs': 0.356302} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.794175] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Reconfigured VM instance instance-00000029 to attach disk [datastore2] 025dfe36-1f14-4bda-84a0-d424364b745b/025dfe36-1f14-4bda-84a0-d424364b745b.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1873.794863] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b246a525-d016-4640-93e3-695ae6b4c329 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.802056] env[62684]: DEBUG oslo_vmware.api [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 1873.802056] env[62684]: value = "task-2052732" [ 1873.802056] env[62684]: _type = "Task" [ 1873.802056] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.811234] env[62684]: DEBUG oslo_vmware.api [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2052732, 'name': Rename_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.023593] env[62684]: DEBUG oslo_vmware.api [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Task: {'id': task-2052730, 'name': PowerOffVM_Task, 'duration_secs': 0.221009} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.023919] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1874.024109] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1874.024379] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c545d8fd-eafb-4e37-a1a8-f1218b6ec951 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.109426] env[62684]: DEBUG oslo_vmware.api [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052731, 'name': PowerOffVM_Task, 'duration_secs': 0.185732} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.109839] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1874.109931] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1874.110210] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c6c54882-05b8-483c-9be3-f06fede9dbc1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.129972] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1874.129972] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1874.129972] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Deleting the datastore file [datastore2] d532b5fa-90a3-4f25-8684-4eabaa432c86 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1874.129972] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aad55dc5-ede4-46c5-8a43-dd555535ba6b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.137109] env[62684]: DEBUG oslo_vmware.api [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Waiting for the task: (returnval){ [ 1874.137109] env[62684]: value = "task-2052735" [ 1874.137109] env[62684]: _type = "Task" [ 1874.137109] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.145264] env[62684]: DEBUG oslo_vmware.api [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Task: {'id': task-2052735, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.175834] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.640s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1874.175998] env[62684]: DEBUG nova.compute.manager [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1874.178666] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.052s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1874.178914] env[62684]: DEBUG nova.objects.instance [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Lazy-loading 'resources' on Instance uuid b788c51b-367b-4eef-93d2-faa8836469b6 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1874.215435] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1874.215733] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1874.215932] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Deleting the datastore file [datastore1] cfe219da-adf9-44b9-9df3-752ccf72a68b {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1874.216493] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-32160540-7e7c-4792-8fa1-e3468abce35b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.223087] env[62684]: DEBUG oslo_vmware.api [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1874.223087] env[62684]: value = "task-2052736" [ 1874.223087] env[62684]: _type = "Task" [ 1874.223087] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.230890] env[62684]: DEBUG oslo_vmware.api [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052736, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.312908] env[62684]: DEBUG oslo_vmware.api [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2052732, 'name': Rename_Task, 'duration_secs': 0.148597} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.313282] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1874.313659] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5604bc32-0b34-4801-accd-172c1cc5146b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.320228] env[62684]: DEBUG oslo_vmware.api [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 1874.320228] env[62684]: value = "task-2052737" [ 1874.320228] env[62684]: _type = "Task" [ 1874.320228] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.327970] env[62684]: DEBUG oslo_vmware.api [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2052737, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.650862] env[62684]: DEBUG oslo_vmware.api [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Task: {'id': task-2052735, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177147} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.651160] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1874.651357] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1874.651596] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1874.651712] env[62684]: INFO nova.compute.manager [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1874.651962] env[62684]: DEBUG oslo.service.loopingcall [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1874.652182] env[62684]: DEBUG nova.compute.manager [-] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1874.652285] env[62684]: DEBUG nova.network.neutron [-] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1874.681925] env[62684]: DEBUG nova.compute.utils [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1874.685785] env[62684]: DEBUG nova.compute.manager [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1874.685978] env[62684]: DEBUG nova.network.neutron [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1874.738147] env[62684]: DEBUG oslo_vmware.api [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052736, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167982} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.741078] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1874.741284] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1874.741525] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1874.741739] env[62684]: INFO nova.compute.manager [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1874.742056] env[62684]: DEBUG oslo.service.loopingcall [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1874.742712] env[62684]: DEBUG nova.compute.manager [-] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1874.742807] env[62684]: DEBUG nova.network.neutron [-] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1874.794504] env[62684]: DEBUG nova.policy [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eb7e9182deef4d3a85da6c81598e3625', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b4cd3bf56d0d4e5ba60f96f36034f45c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1874.832743] env[62684]: DEBUG oslo_vmware.api [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2052737, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.969714] env[62684]: DEBUG nova.network.neutron [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Successfully updated port: 8da4cae5-6ee0-4160-8056-921fb0de7b4f {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1875.191189] env[62684]: DEBUG nova.compute.manager [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1875.205023] env[62684]: DEBUG nova.compute.manager [req-3cb38cd7-f996-449b-ba19-9d42efb75aaa req-71b4faab-3799-4cd7-99ce-a55be897d7bf service nova] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Received event network-vif-plugged-8da4cae5-6ee0-4160-8056-921fb0de7b4f {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1875.205023] env[62684]: DEBUG oslo_concurrency.lockutils [req-3cb38cd7-f996-449b-ba19-9d42efb75aaa req-71b4faab-3799-4cd7-99ce-a55be897d7bf service nova] Acquiring lock "b945f05d-ef1c-4469-9390-f7bbd4f435f0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1875.205023] env[62684]: DEBUG oslo_concurrency.lockutils [req-3cb38cd7-f996-449b-ba19-9d42efb75aaa req-71b4faab-3799-4cd7-99ce-a55be897d7bf service nova] Lock "b945f05d-ef1c-4469-9390-f7bbd4f435f0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1875.205023] env[62684]: DEBUG oslo_concurrency.lockutils [req-3cb38cd7-f996-449b-ba19-9d42efb75aaa req-71b4faab-3799-4cd7-99ce-a55be897d7bf service nova] Lock "b945f05d-ef1c-4469-9390-f7bbd4f435f0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1875.205023] env[62684]: DEBUG nova.compute.manager [req-3cb38cd7-f996-449b-ba19-9d42efb75aaa req-71b4faab-3799-4cd7-99ce-a55be897d7bf service nova] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] No waiting events found dispatching network-vif-plugged-8da4cae5-6ee0-4160-8056-921fb0de7b4f {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1875.206268] env[62684]: WARNING nova.compute.manager [req-3cb38cd7-f996-449b-ba19-9d42efb75aaa req-71b4faab-3799-4cd7-99ce-a55be897d7bf service nova] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Received unexpected event network-vif-plugged-8da4cae5-6ee0-4160-8056-921fb0de7b4f for instance with vm_state building and task_state spawning. [ 1875.206454] env[62684]: DEBUG nova.compute.manager [req-3cb38cd7-f996-449b-ba19-9d42efb75aaa req-71b4faab-3799-4cd7-99ce-a55be897d7bf service nova] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Received event network-changed-8da4cae5-6ee0-4160-8056-921fb0de7b4f {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1875.206641] env[62684]: DEBUG nova.compute.manager [req-3cb38cd7-f996-449b-ba19-9d42efb75aaa req-71b4faab-3799-4cd7-99ce-a55be897d7bf service nova] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Refreshing instance network info cache due to event network-changed-8da4cae5-6ee0-4160-8056-921fb0de7b4f. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1875.206823] env[62684]: DEBUG oslo_concurrency.lockutils [req-3cb38cd7-f996-449b-ba19-9d42efb75aaa req-71b4faab-3799-4cd7-99ce-a55be897d7bf service nova] Acquiring lock "refresh_cache-b945f05d-ef1c-4469-9390-f7bbd4f435f0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1875.206978] env[62684]: DEBUG oslo_concurrency.lockutils [req-3cb38cd7-f996-449b-ba19-9d42efb75aaa req-71b4faab-3799-4cd7-99ce-a55be897d7bf service nova] Acquired lock "refresh_cache-b945f05d-ef1c-4469-9390-f7bbd4f435f0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1875.207192] env[62684]: DEBUG nova.network.neutron [req-3cb38cd7-f996-449b-ba19-9d42efb75aaa req-71b4faab-3799-4cd7-99ce-a55be897d7bf service nova] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Refreshing network info cache for port 8da4cae5-6ee0-4160-8056-921fb0de7b4f {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1875.275827] env[62684]: DEBUG nova.compute.manager [req-7a8ec409-8c84-4ade-a264-1659860f3a25 req-c3e1c8f0-c3e6-4e8c-8871-34a5735bd105 service nova] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Received event network-vif-deleted-1333c708-96f6-4c98-bc29-9be57f9be96f {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1875.276057] env[62684]: INFO nova.compute.manager [req-7a8ec409-8c84-4ade-a264-1659860f3a25 req-c3e1c8f0-c3e6-4e8c-8871-34a5735bd105 service nova] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Neutron deleted interface 1333c708-96f6-4c98-bc29-9be57f9be96f; detaching it from the instance and deleting it from the info cache [ 1875.276244] env[62684]: DEBUG nova.network.neutron [req-7a8ec409-8c84-4ade-a264-1659860f3a25 req-c3e1c8f0-c3e6-4e8c-8871-34a5735bd105 service nova] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1875.313715] env[62684]: DEBUG nova.network.neutron [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Successfully created port: c9bcfcec-29a8-4bb0-91f9-14d8c744d944 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1875.321653] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7862aaba-6bc9-4d2e-88d7-b5f8ab496b9a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.337428] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f8be2c-d896-4c9f-8808-777bc9ffee82 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.340982] env[62684]: DEBUG oslo_vmware.api [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2052737, 'name': PowerOnVM_Task, 'duration_secs': 0.677436} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1875.341330] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1875.341579] env[62684]: INFO nova.compute.manager [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Took 8.72 seconds to spawn the instance on the hypervisor. [ 1875.341822] env[62684]: DEBUG nova.compute.manager [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1875.343036] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59305123-cfca-4bb2-90ac-519cb5252118 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.373772] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-362f8ecd-767e-4ce7-a5a5-598d6e318780 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.388688] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673ec641-12c4-4b5c-8956-68e3ab3fbfc0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.403551] env[62684]: DEBUG nova.compute.provider_tree [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1875.475416] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Acquiring lock "refresh_cache-b945f05d-ef1c-4469-9390-f7bbd4f435f0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1875.744168] env[62684]: DEBUG nova.network.neutron [req-3cb38cd7-f996-449b-ba19-9d42efb75aaa req-71b4faab-3799-4cd7-99ce-a55be897d7bf service nova] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1875.746449] env[62684]: DEBUG nova.network.neutron [-] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1875.783565] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5c5aed2f-55bc-472d-823d-32342eac8001 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.796188] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd16f14a-90e4-413f-9739-34aa505d9e5a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.827705] env[62684]: DEBUG nova.compute.manager [req-7a8ec409-8c84-4ade-a264-1659860f3a25 req-c3e1c8f0-c3e6-4e8c-8871-34a5735bd105 service nova] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Detach interface failed, port_id=1333c708-96f6-4c98-bc29-9be57f9be96f, reason: Instance cfe219da-adf9-44b9-9df3-752ccf72a68b could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1875.829897] env[62684]: DEBUG nova.network.neutron [req-3cb38cd7-f996-449b-ba19-9d42efb75aaa req-71b4faab-3799-4cd7-99ce-a55be897d7bf service nova] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1875.884869] env[62684]: DEBUG nova.network.neutron [-] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1875.893090] env[62684]: INFO nova.compute.manager [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Took 42.63 seconds to build instance. [ 1875.924737] env[62684]: ERROR nova.scheduler.client.report [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] [req-d821ed8e-82b6-4e22-849a-9b556aa696cf] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d821ed8e-82b6-4e22-849a-9b556aa696cf"}]} [ 1875.944677] env[62684]: DEBUG nova.scheduler.client.report [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1875.960016] env[62684]: DEBUG nova.scheduler.client.report [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1875.960565] env[62684]: DEBUG nova.compute.provider_tree [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1875.973662] env[62684]: DEBUG nova.scheduler.client.report [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1875.994805] env[62684]: DEBUG nova.scheduler.client.report [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1876.207544] env[62684]: DEBUG nova.compute.manager [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1876.238454] env[62684]: DEBUG nova.virt.hardware [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1876.238750] env[62684]: DEBUG nova.virt.hardware [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1876.238953] env[62684]: DEBUG nova.virt.hardware [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1876.239963] env[62684]: DEBUG nova.virt.hardware [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1876.240158] env[62684]: DEBUG nova.virt.hardware [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1876.240322] env[62684]: DEBUG nova.virt.hardware [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1876.240541] env[62684]: DEBUG nova.virt.hardware [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1876.240711] env[62684]: DEBUG nova.virt.hardware [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1876.240889] env[62684]: DEBUG nova.virt.hardware [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1876.241073] env[62684]: DEBUG nova.virt.hardware [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1876.241258] env[62684]: DEBUG nova.virt.hardware [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1876.242149] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3e566e5-ecad-45ac-aa2d-4d9ca56d6e53 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.249405] env[62684]: INFO nova.compute.manager [-] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Took 1.51 seconds to deallocate network for instance. [ 1876.257617] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17bd2d7b-2513-493d-818d-26506c17128d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.331107] env[62684]: DEBUG oslo_concurrency.lockutils [req-3cb38cd7-f996-449b-ba19-9d42efb75aaa req-71b4faab-3799-4cd7-99ce-a55be897d7bf service nova] Releasing lock "refresh_cache-b945f05d-ef1c-4469-9390-f7bbd4f435f0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1876.331483] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Acquired lock "refresh_cache-b945f05d-ef1c-4469-9390-f7bbd4f435f0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1876.331639] env[62684]: DEBUG nova.network.neutron [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1876.391485] env[62684]: INFO nova.compute.manager [-] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Took 1.74 seconds to deallocate network for instance. 
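Editorial note: the entries above repeat the same vCenter task pattern several times — an API call returns a task handle (e.g. task-2052732 'Rename_Task', task-2052737 'PowerOnVM_Task'), the driver logs "Waiting for the task ... to complete", polls "progress is N%", and moves on once the task "completed successfully". The sketch below is only a minimal, self-contained illustration of that poll-until-done loop; it does not use the real oslo.vmware API, and `FakeTask`, `wait_for_task`, the interval and timeout values are assumptions made for the example.

```python
import time
from dataclasses import dataclass


@dataclass
class FakeTask:
    """Illustrative stand-in for a vCenter task handle (assumption for the demo)."""
    task_id: str
    name: str
    _progress: int = 0

    def poll(self):
        # Advance and report simulated progress, mimicking the
        # "progress is N%" lines in the log above.
        self._progress = min(self._progress + 45, 100)
        state = "success" if self._progress == 100 else "running"
        return state, self._progress


def wait_for_task(task, interval=0.5, timeout=30.0):
    """Poll a task until it finishes — roughly the behaviour the
    'Waiting for the task ... to complete' entries describe.
    Parameter names and defaults here are assumptions, not oslo.vmware's."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = task.poll()
        print(f"Task {task.task_id} ({task.name}) progress is {progress}%.")
        if state == "success":
            print(f"Task {task.task_id} completed successfully.")
            return
        time.sleep(interval)
    raise TimeoutError(f"Task {task.task_id} did not complete within {timeout}s")


if __name__ == "__main__":
    wait_for_task(FakeTask("task-2052737", "PowerOnVM_Task"))
```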
[ 1876.394896] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eda741f2-1981-4530-9a3d-c38b28a3b7ea tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "025dfe36-1f14-4bda-84a0-d424364b745b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.164s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1876.455810] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e29599c7-d8c5-4b33-8f88-0c59f15c69c8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.463879] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8adc4d6-3d14-42a1-b490-da7a134ddf52 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.493760] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcfda4cc-612b-4e1a-bb05-9c77aa642f22 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.501815] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b64384a-54b0-45da-92c6-1cf064dc0623 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.515283] env[62684]: DEBUG nova.compute.provider_tree [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1876.758683] env[62684]: DEBUG oslo_concurrency.lockutils [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1876.868070] env[62684]: DEBUG nova.network.neutron [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1876.900835] env[62684]: DEBUG oslo_concurrency.lockutils [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1876.901256] env[62684]: DEBUG nova.compute.manager [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1877.029438] env[62684]: DEBUG nova.network.neutron [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Updating instance_info_cache with network_info: [{"id": "8da4cae5-6ee0-4160-8056-921fb0de7b4f", "address": "fa:16:3e:fa:5f:3d", "network": {"id": "e2a8905c-0e57-4d19-8af7-8d6c6b71190c", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1185486334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efeb7b9b19c540c9a65cb4beed66d9bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a3f99df-d1bc-4a37-a048-263445d4a7b0", "external-id": "nsx-vlan-transportzone-374", "segmentation_id": 374, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8da4cae5-6e", "ovs_interfaceid": "8da4cae5-6ee0-4160-8056-921fb0de7b4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1877.053320] env[62684]: DEBUG nova.scheduler.client.report [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 67 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1877.053628] env[62684]: DEBUG nova.compute.provider_tree [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Updating resource provider 
c23c281e-ec1f-4876-972e-a98655f2084f generation from 67 to 68 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1877.053839] env[62684]: DEBUG nova.compute.provider_tree [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1877.175331] env[62684]: DEBUG nova.network.neutron [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Successfully updated port: c9bcfcec-29a8-4bb0-91f9-14d8c744d944 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1877.430489] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1877.471739] env[62684]: DEBUG nova.compute.manager [req-80e2d3bf-cc0e-434a-a1ab-6450a4b49baf req-e5119cf7-1d7c-414e-bc7b-fc74f9ba690b service nova] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Received event network-vif-plugged-c9bcfcec-29a8-4bb0-91f9-14d8c744d944 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1877.472109] env[62684]: DEBUG oslo_concurrency.lockutils [req-80e2d3bf-cc0e-434a-a1ab-6450a4b49baf req-e5119cf7-1d7c-414e-bc7b-fc74f9ba690b service nova] Acquiring lock "50bc9674-d19c-40f1-a89f-1738a1e48307-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1877.473473] env[62684]: DEBUG oslo_concurrency.lockutils [req-80e2d3bf-cc0e-434a-a1ab-6450a4b49baf req-e5119cf7-1d7c-414e-bc7b-fc74f9ba690b service nova] Lock "50bc9674-d19c-40f1-a89f-1738a1e48307-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1877.473473] env[62684]: DEBUG oslo_concurrency.lockutils [req-80e2d3bf-cc0e-434a-a1ab-6450a4b49baf req-e5119cf7-1d7c-414e-bc7b-fc74f9ba690b service nova] Lock "50bc9674-d19c-40f1-a89f-1738a1e48307-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1877.473473] env[62684]: DEBUG nova.compute.manager [req-80e2d3bf-cc0e-434a-a1ab-6450a4b49baf req-e5119cf7-1d7c-414e-bc7b-fc74f9ba690b service nova] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] No waiting events found dispatching 
network-vif-plugged-c9bcfcec-29a8-4bb0-91f9-14d8c744d944 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1877.473473] env[62684]: WARNING nova.compute.manager [req-80e2d3bf-cc0e-434a-a1ab-6450a4b49baf req-e5119cf7-1d7c-414e-bc7b-fc74f9ba690b service nova] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Received unexpected event network-vif-plugged-c9bcfcec-29a8-4bb0-91f9-14d8c744d944 for instance with vm_state building and task_state spawning. [ 1877.473473] env[62684]: DEBUG nova.compute.manager [req-80e2d3bf-cc0e-434a-a1ab-6450a4b49baf req-e5119cf7-1d7c-414e-bc7b-fc74f9ba690b service nova] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Received event network-changed-c9bcfcec-29a8-4bb0-91f9-14d8c744d944 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1877.473744] env[62684]: DEBUG nova.compute.manager [req-80e2d3bf-cc0e-434a-a1ab-6450a4b49baf req-e5119cf7-1d7c-414e-bc7b-fc74f9ba690b service nova] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Refreshing instance network info cache due to event network-changed-c9bcfcec-29a8-4bb0-91f9-14d8c744d944. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1877.473744] env[62684]: DEBUG oslo_concurrency.lockutils [req-80e2d3bf-cc0e-434a-a1ab-6450a4b49baf req-e5119cf7-1d7c-414e-bc7b-fc74f9ba690b service nova] Acquiring lock "refresh_cache-50bc9674-d19c-40f1-a89f-1738a1e48307" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1877.473975] env[62684]: DEBUG oslo_concurrency.lockutils [req-80e2d3bf-cc0e-434a-a1ab-6450a4b49baf req-e5119cf7-1d7c-414e-bc7b-fc74f9ba690b service nova] Acquired lock "refresh_cache-50bc9674-d19c-40f1-a89f-1738a1e48307" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1877.474048] env[62684]: DEBUG nova.network.neutron [req-80e2d3bf-cc0e-434a-a1ab-6450a4b49baf req-e5119cf7-1d7c-414e-bc7b-fc74f9ba690b service nova] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Refreshing network info cache for port c9bcfcec-29a8-4bb0-91f9-14d8c744d944 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1877.532350] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Releasing lock "refresh_cache-b945f05d-ef1c-4469-9390-f7bbd4f435f0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1877.533102] env[62684]: DEBUG nova.compute.manager [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Instance network_info: |[{"id": "8da4cae5-6ee0-4160-8056-921fb0de7b4f", "address": "fa:16:3e:fa:5f:3d", "network": {"id": "e2a8905c-0e57-4d19-8af7-8d6c6b71190c", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1185486334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"efeb7b9b19c540c9a65cb4beed66d9bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a3f99df-d1bc-4a37-a048-263445d4a7b0", "external-id": "nsx-vlan-transportzone-374", "segmentation_id": 374, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8da4cae5-6e", "ovs_interfaceid": "8da4cae5-6ee0-4160-8056-921fb0de7b4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1877.534897] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:5f:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0a3f99df-d1bc-4a37-a048-263445d4a7b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8da4cae5-6ee0-4160-8056-921fb0de7b4f', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1877.555721] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Creating folder: Project (efeb7b9b19c540c9a65cb4beed66d9bc). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1877.558774] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-430b071d-7141-4b96-8c8b-c851d6f92895 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.565388] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.387s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1877.569129] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.723s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1877.571852] env[62684]: INFO nova.compute.claims [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1877.582396] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Created folder: Project (efeb7b9b19c540c9a65cb4beed66d9bc) in parent group-v421118. 
[ 1877.582832] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Creating folder: Instances. Parent ref: group-v421244. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1877.583273] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eff677c6-3ecf-429f-82b0-77066124db03 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.590297] env[62684]: INFO nova.scheduler.client.report [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Deleted allocations for instance b788c51b-367b-4eef-93d2-faa8836469b6 [ 1877.601026] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Created folder: Instances in parent group-v421244. [ 1877.601026] env[62684]: DEBUG oslo.service.loopingcall [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1877.601026] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1877.601026] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9ccc7119-63fa-4bb6-9487-aab169710957 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.635317] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1877.635317] env[62684]: value = "task-2052740" [ 1877.635317] env[62684]: _type = "Task" [ 1877.635317] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1877.649971] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052740, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.680778] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Acquiring lock "refresh_cache-50bc9674-d19c-40f1-a89f-1738a1e48307" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1877.888722] env[62684]: DEBUG nova.compute.manager [req-b3582e6a-a95e-4e8f-81a7-695c92e7c283 req-67c68ec5-7146-4f4b-88c4-ef64d6ae2822 service nova] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Received event network-vif-deleted-f8d1bd12-b449-41ef-bd95-755f619b639a {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1878.034047] env[62684]: DEBUG nova.network.neutron [req-80e2d3bf-cc0e-434a-a1ab-6450a4b49baf req-e5119cf7-1d7c-414e-bc7b-fc74f9ba690b service nova] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1878.104951] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6505778a-1228-4b1f-a22f-4d1b04e8e76d tempest-DeleteServersAdminTestJSON-1961784838 tempest-DeleteServersAdminTestJSON-1961784838-project-member] Lock "b788c51b-367b-4eef-93d2-faa8836469b6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.967s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1878.105350] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "b788c51b-367b-4eef-93d2-faa8836469b6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 22.753s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1878.105696] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8cb50a78-32f3-4fe9-9201-77272a8f7cfd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.116142] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2d8cebd-cc4b-41ed-81d9-e33c52e69300 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.160898] env[62684]: DEBUG nova.network.neutron [req-80e2d3bf-cc0e-434a-a1ab-6450a4b49baf req-e5119cf7-1d7c-414e-bc7b-fc74f9ba690b service nova] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1878.165755] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052740, 'name': CreateVM_Task, 'duration_secs': 0.378961} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.166277] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1878.167028] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1878.167300] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1878.167668] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1878.168078] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb0c4b8c-4c5e-460b-8f66-279bdcc7c9ea {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.175208] env[62684]: DEBUG oslo_vmware.api [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Waiting for the task: (returnval){ [ 1878.175208] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52348c47-b72c-fd2f-99ef-903e979506ff" [ 1878.175208] env[62684]: _type = "Task" [ 1878.175208] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.187100] env[62684]: DEBUG oslo_vmware.api [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52348c47-b72c-fd2f-99ef-903e979506ff, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.659486] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "b788c51b-367b-4eef-93d2-faa8836469b6" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.554s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1878.669193] env[62684]: DEBUG oslo_concurrency.lockutils [req-80e2d3bf-cc0e-434a-a1ab-6450a4b49baf req-e5119cf7-1d7c-414e-bc7b-fc74f9ba690b service nova] Releasing lock "refresh_cache-50bc9674-d19c-40f1-a89f-1738a1e48307" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1878.669905] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Acquired lock "refresh_cache-50bc9674-d19c-40f1-a89f-1738a1e48307" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1878.670082] env[62684]: DEBUG nova.network.neutron [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1878.687260] env[62684]: DEBUG oslo_vmware.api [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52348c47-b72c-fd2f-99ef-903e979506ff, 'name': SearchDatastore_Task, 'duration_secs': 0.011898} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.688079] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1878.688079] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1878.688408] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1878.688408] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1878.688548] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1878.688936] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a916d403-184e-482f-8975-de7ca6c563f3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.699782] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1878.699966] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1878.701251] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70802a7e-b9ea-462e-b7de-9ade2ef2b813 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.706454] env[62684]: DEBUG oslo_vmware.api [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Waiting for the task: (returnval){ [ 1878.706454] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529eb493-292d-ea24-6360-b647c237c41f" [ 1878.706454] env[62684]: _type = "Task" [ 1878.706454] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.716660] env[62684]: DEBUG oslo_vmware.api [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529eb493-292d-ea24-6360-b647c237c41f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.879070] env[62684]: DEBUG oslo_concurrency.lockutils [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Acquiring lock "5bc73032-45f9-4b5c-a4ea-e07c48e4f82b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1878.879364] env[62684]: DEBUG oslo_concurrency.lockutils [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Lock "5bc73032-45f9-4b5c-a4ea-e07c48e4f82b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1878.879610] env[62684]: DEBUG oslo_concurrency.lockutils [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Acquiring lock "5bc73032-45f9-4b5c-a4ea-e07c48e4f82b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1878.879778] env[62684]: DEBUG oslo_concurrency.lockutils [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Lock "5bc73032-45f9-4b5c-a4ea-e07c48e4f82b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1878.879959] env[62684]: DEBUG oslo_concurrency.lockutils [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Lock "5bc73032-45f9-4b5c-a4ea-e07c48e4f82b-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1878.885361] env[62684]: INFO nova.compute.manager [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Terminating instance [ 1878.887879] env[62684]: DEBUG nova.compute.manager [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1878.887879] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1878.888334] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-be3d9489-72fa-4b47-8243-6ad27c2527a3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.896856] env[62684]: DEBUG oslo_vmware.api [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Waiting for the task: (returnval){ [ 1878.896856] env[62684]: value = "task-2052741" [ 1878.896856] env[62684]: _type = "Task" [ 1878.896856] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.909081] env[62684]: DEBUG oslo_vmware.api [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052741, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.969312] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Acquiring lock "f44b2e88-af6d-4252-b562-9d5fa7745b56" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1878.969597] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Lock "f44b2e88-af6d-4252-b562-9d5fa7745b56" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1878.969761] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Acquiring lock "f44b2e88-af6d-4252-b562-9d5fa7745b56-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1878.970268] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Lock "f44b2e88-af6d-4252-b562-9d5fa7745b56-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1878.970353] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Lock "f44b2e88-af6d-4252-b562-9d5fa7745b56-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1878.972700] env[62684]: INFO nova.compute.manager [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Terminating instance [ 1878.974924] env[62684]: DEBUG nova.compute.manager [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1878.974924] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1878.975341] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1277825-be6d-4327-960f-32746ecd89ed {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.985999] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1878.990255] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7a23020c-d917-45c0-821d-977c6fd1b75d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.997298] env[62684]: DEBUG oslo_vmware.api [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Waiting for the task: (returnval){ [ 1878.997298] env[62684]: value = "task-2052742" [ 1878.997298] env[62684]: _type = "Task" [ 1878.997298] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.009122] env[62684]: DEBUG oslo_vmware.api [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Task: {'id': task-2052742, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.098233] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33df7b60-68cf-4a1f-9c4d-8d83caea005b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.107103] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1aa48af-bb32-4804-963b-916358476e7c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.140812] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faa52e3f-a11a-4ae8-986f-b9d1f100df5a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.150168] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d3c2608-ecff-4eb4-ab67-5740136b2913 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.167869] env[62684]: DEBUG nova.compute.provider_tree [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1879.217193] env[62684]: DEBUG oslo_vmware.api [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529eb493-292d-ea24-6360-b647c237c41f, 'name': SearchDatastore_Task, 'duration_secs': 0.017225} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.218098] env[62684]: DEBUG nova.network.neutron [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1879.220604] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d7439be-cb17-4e85-9485-c73f685f0ea8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.226106] env[62684]: DEBUG oslo_vmware.api [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Waiting for the task: (returnval){ [ 1879.226106] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]528e3839-c055-0bf9-b8d8-ca5d887c42c9" [ 1879.226106] env[62684]: _type = "Task" [ 1879.226106] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.233901] env[62684]: DEBUG oslo_vmware.api [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]528e3839-c055-0bf9-b8d8-ca5d887c42c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.410448] env[62684]: DEBUG oslo_vmware.api [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052741, 'name': PowerOffVM_Task, 'duration_secs': 0.206664} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.410795] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1879.411522] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Volume detach. Driver type: vmdk {{(pid=62684) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1879.411632] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421128', 'volume_id': '34523d13-ed90-416e-a19a-57c837136d21', 'name': 'volume-34523d13-ed90-416e-a19a-57c837136d21', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5bc73032-45f9-4b5c-a4ea-e07c48e4f82b', 'attached_at': '', 'detached_at': '', 'volume_id': '34523d13-ed90-416e-a19a-57c837136d21', 'serial': '34523d13-ed90-416e-a19a-57c837136d21'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1879.412497] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54eff93b-556f-4e7f-8379-a18d91194114 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.436779] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c99a0b-0bfe-4331-9542-8c653f976225 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.446554] env[62684]: DEBUG nova.network.neutron [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Updating instance_info_cache with network_info: [{"id": "c9bcfcec-29a8-4bb0-91f9-14d8c744d944", "address": "fa:16:3e:4e:2c:5e", "network": 
{"id": "80d16806-3eb2-4a9b-b710-ff0490e0b3a2", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1680491689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4cd3bf56d0d4e5ba60f96f36034f45c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e365f3b9-706b-4fa2-8f95-ae51b35ab011", "external-id": "nsx-vlan-transportzone-154", "segmentation_id": 154, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9bcfcec-29", "ovs_interfaceid": "c9bcfcec-29a8-4bb0-91f9-14d8c744d944", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1879.448408] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b1137f-6240-4545-a324-b5ab50bb6905 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.470056] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a09d3b-470f-4d92-aaed-8bc07ee3646f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.486150] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] The volume has not been displaced from its original location: [datastore1] volume-34523d13-ed90-416e-a19a-57c837136d21/volume-34523d13-ed90-416e-a19a-57c837136d21.vmdk. No consolidation needed. 
{{(pid=62684) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1879.491848] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Reconfiguring VM instance instance-00000010 to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1879.493704] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-42507f0c-9a94-4fa5-b71e-d7198f2e3997 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.508722] env[62684]: DEBUG nova.compute.manager [req-ffe108b5-4b1f-4c0c-aa43-8dcc7552b2ac req-8a33beb8-285c-4897-bf9f-4bc12842cfff service nova] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Received event network-changed-1d1c0f31-e026-45f0-b3c8-5ba02555e863 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1879.508931] env[62684]: DEBUG nova.compute.manager [req-ffe108b5-4b1f-4c0c-aa43-8dcc7552b2ac req-8a33beb8-285c-4897-bf9f-4bc12842cfff service nova] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Refreshing instance network info cache due to event network-changed-1d1c0f31-e026-45f0-b3c8-5ba02555e863. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1879.509177] env[62684]: DEBUG oslo_concurrency.lockutils [req-ffe108b5-4b1f-4c0c-aa43-8dcc7552b2ac req-8a33beb8-285c-4897-bf9f-4bc12842cfff service nova] Acquiring lock "refresh_cache-025dfe36-1f14-4bda-84a0-d424364b745b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1879.509698] env[62684]: DEBUG oslo_concurrency.lockutils [req-ffe108b5-4b1f-4c0c-aa43-8dcc7552b2ac req-8a33beb8-285c-4897-bf9f-4bc12842cfff service nova] Acquired lock "refresh_cache-025dfe36-1f14-4bda-84a0-d424364b745b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1879.509800] env[62684]: DEBUG nova.network.neutron [req-ffe108b5-4b1f-4c0c-aa43-8dcc7552b2ac req-8a33beb8-285c-4897-bf9f-4bc12842cfff service nova] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Refreshing network info cache for port 1d1c0f31-e026-45f0-b3c8-5ba02555e863 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1879.521877] env[62684]: DEBUG oslo_vmware.api [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Task: {'id': task-2052742, 'name': PowerOffVM_Task, 'duration_secs': 0.184062} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.522813] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1879.522994] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1879.523325] env[62684]: DEBUG oslo_vmware.api [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Waiting for the task: (returnval){ [ 1879.523325] env[62684]: value = "task-2052743" [ 1879.523325] env[62684]: _type = "Task" [ 1879.523325] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.523525] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-56fdbb6b-6c90-47f8-9c9b-69f0f72f6aaf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.533870] env[62684]: DEBUG oslo_vmware.api [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052743, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.606507] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1879.606791] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1879.607015] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Deleting the datastore file [datastore2] f44b2e88-af6d-4252-b562-9d5fa7745b56 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1879.607292] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-67f677cb-17d6-4253-9c41-3bd05daea4f1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.613849] env[62684]: DEBUG oslo_vmware.api [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Waiting for the task: (returnval){ [ 1879.613849] env[62684]: value = "task-2052745" [ 1879.613849] env[62684]: _type = "Task" [ 1879.613849] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.622904] env[62684]: DEBUG oslo_vmware.api [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Task: {'id': task-2052745, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.671867] env[62684]: DEBUG nova.scheduler.client.report [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1879.738868] env[62684]: DEBUG oslo_vmware.api [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]528e3839-c055-0bf9-b8d8-ca5d887c42c9, 'name': SearchDatastore_Task, 'duration_secs': 0.00962} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.739155] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1879.739496] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] b945f05d-ef1c-4469-9390-f7bbd4f435f0/b945f05d-ef1c-4469-9390-f7bbd4f435f0.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1879.739765] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5df4941c-99ec-4fb7-b172-c7eb0a755ae1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.746256] env[62684]: DEBUG oslo_vmware.api [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Waiting for the task: (returnval){ [ 1879.746256] env[62684]: value = "task-2052746" [ 1879.746256] env[62684]: _type = "Task" [ 1879.746256] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.754408] env[62684]: DEBUG oslo_vmware.api [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Task: {'id': task-2052746, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.936906] env[62684]: DEBUG nova.objects.instance [None req-f919698b-4cf5-4e1f-96bd-805d6b568dbd tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Lazy-loading 'flavor' on Instance uuid 0dbd52ac-c987-4728-974e-73e99465c5e7 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1879.952094] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Releasing lock "refresh_cache-50bc9674-d19c-40f1-a89f-1738a1e48307" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1879.952510] env[62684]: DEBUG nova.compute.manager [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Instance network_info: |[{"id": "c9bcfcec-29a8-4bb0-91f9-14d8c744d944", "address": "fa:16:3e:4e:2c:5e", "network": {"id": "80d16806-3eb2-4a9b-b710-ff0490e0b3a2", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1680491689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4cd3bf56d0d4e5ba60f96f36034f45c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e365f3b9-706b-4fa2-8f95-ae51b35ab011", "external-id": "nsx-vlan-transportzone-154", "segmentation_id": 154, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9bcfcec-29", "ovs_interfaceid": "c9bcfcec-29a8-4bb0-91f9-14d8c744d944", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1879.953414] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:2c:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e365f3b9-706b-4fa2-8f95-ae51b35ab011', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c9bcfcec-29a8-4bb0-91f9-14d8c744d944', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1879.961575] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Creating folder: Project (b4cd3bf56d0d4e5ba60f96f36034f45c). Parent ref: group-v421118. 
{{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1879.962349] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e6460d39-293c-4ddc-9a58-514dab78a954 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.973892] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Created folder: Project (b4cd3bf56d0d4e5ba60f96f36034f45c) in parent group-v421118. [ 1879.974678] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Creating folder: Instances. Parent ref: group-v421247. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1879.974878] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e5d611db-7677-481e-8ccd-4b38d8657280 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.986669] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Created folder: Instances in parent group-v421247. [ 1879.986978] env[62684]: DEBUG oslo.service.loopingcall [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1879.987219] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1879.987446] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7526d467-8e90-47f6-b753-ac0d4f231541 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.009368] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1880.009368] env[62684]: value = "task-2052749" [ 1880.009368] env[62684]: _type = "Task" [ 1880.009368] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.024937] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052749, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.035725] env[62684]: DEBUG oslo_vmware.api [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052743, 'name': ReconfigVM_Task, 'duration_secs': 0.171719} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.036515] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Reconfigured VM instance instance-00000010 to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1880.041637] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-84e6c645-a43d-4111-8a34-8278afb5f05c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.060886] env[62684]: DEBUG oslo_vmware.api [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Waiting for the task: (returnval){ [ 1880.060886] env[62684]: value = "task-2052750" [ 1880.060886] env[62684]: _type = "Task" [ 1880.060886] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.070632] env[62684]: DEBUG oslo_vmware.api [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052750, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.127031] env[62684]: DEBUG oslo_vmware.api [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Task: {'id': task-2052745, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1445} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.127576] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1880.127791] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1880.128057] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1880.128313] env[62684]: INFO nova.compute.manager [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Took 1.15 seconds to destroy the instance on the hypervisor. 
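[editor's note] After the hypervisor destroy completes, network cleanup is handed to a looping call; the "Waiting for function ... to return" DEBUG lines (from oslo_service/loopingcall.py) in this log come from that mechanism. A rough sketch of how such a retry loop is driven, under placeholder names (this is not Nova's actual _deallocate_network_with_retries body):

    # Illustrative use of oslo.service looping calls: the body is re-run on a
    # fixed interval until it raises LoopingCallDone. Placeholder callables.
    from oslo_service import loopingcall

    class TransientNeutronError(Exception):
        """Placeholder for a retryable failure; not a real Neutron exception."""

    def deallocate_network():
        """Placeholder for the real cleanup call."""

    def _try_deallocate():
        try:
            deallocate_network()
        except TransientNeutronError:
            return                           # swallow and let the loop retry
        raise loopingcall.LoopingCallDone()  # stop once cleanup succeeds

    timer = loopingcall.FixedIntervalLoopingCall(_try_deallocate)
    timer.start(interval=2.0).wait()         # blocks until LoopingCallDone

The earlier "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" entries are produced by the same looping-call wrapper.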
[ 1880.128607] env[62684]: DEBUG oslo.service.loopingcall [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1880.129083] env[62684]: DEBUG nova.compute.manager [-] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1880.129228] env[62684]: DEBUG nova.network.neutron [-] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1880.181401] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.613s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1880.182194] env[62684]: DEBUG nova.compute.manager [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1880.185652] env[62684]: DEBUG oslo_concurrency.lockutils [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.199s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1880.187921] env[62684]: INFO nova.compute.claims [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1880.258039] env[62684]: DEBUG oslo_vmware.api [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Task: {'id': task-2052746, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468961} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.258467] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] b945f05d-ef1c-4469-9390-f7bbd4f435f0/b945f05d-ef1c-4469-9390-f7bbd4f435f0.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1880.258816] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1880.259197] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5b9baaff-ecd1-4876-8542-447dff4cdbb2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.265411] env[62684]: DEBUG oslo_vmware.api [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Waiting for the task: (returnval){ [ 1880.265411] env[62684]: value = "task-2052751" [ 1880.265411] env[62684]: _type = "Task" [ 1880.265411] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.274186] env[62684]: DEBUG oslo_vmware.api [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Task: {'id': task-2052751, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.399784] env[62684]: DEBUG nova.network.neutron [req-ffe108b5-4b1f-4c0c-aa43-8dcc7552b2ac req-8a33beb8-285c-4897-bf9f-4bc12842cfff service nova] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Updated VIF entry in instance network info cache for port 1d1c0f31-e026-45f0-b3c8-5ba02555e863. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1880.399784] env[62684]: DEBUG nova.network.neutron [req-ffe108b5-4b1f-4c0c-aa43-8dcc7552b2ac req-8a33beb8-285c-4897-bf9f-4bc12842cfff service nova] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Updating instance_info_cache with network_info: [{"id": "1d1c0f31-e026-45f0-b3c8-5ba02555e863", "address": "fa:16:3e:42:6e:d1", "network": {"id": "bf53c8de-5f43-4a15-9911-25340615a63b", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1946277195-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "540d70f4b6274c38a5e79c00e389d8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6db039c-542c-4544-a57d-ddcc6c1e8e45", "external-id": "nsx-vlan-transportzone-810", "segmentation_id": 810, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d1c0f31-e0", "ovs_interfaceid": "1d1c0f31-e026-45f0-b3c8-5ba02555e863", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1880.444206] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f919698b-4cf5-4e1f-96bd-805d6b568dbd tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Acquiring lock "refresh_cache-0dbd52ac-c987-4728-974e-73e99465c5e7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1880.444555] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f919698b-4cf5-4e1f-96bd-805d6b568dbd tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Acquired lock "refresh_cache-0dbd52ac-c987-4728-974e-73e99465c5e7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1880.520927] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052749, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.573019] env[62684]: DEBUG oslo_vmware.api [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052750, 'name': ReconfigVM_Task, 'duration_secs': 0.218115} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.573019] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421128', 'volume_id': '34523d13-ed90-416e-a19a-57c837136d21', 'name': 'volume-34523d13-ed90-416e-a19a-57c837136d21', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5bc73032-45f9-4b5c-a4ea-e07c48e4f82b', 'attached_at': '', 'detached_at': '', 'volume_id': '34523d13-ed90-416e-a19a-57c837136d21', 'serial': '34523d13-ed90-416e-a19a-57c837136d21'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1880.573019] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1880.573019] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb497a3c-4062-4640-b14e-e79c5b75deb7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.578929] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1880.579351] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0b28b25a-081b-4b41-9f0c-e1e8b25ff4f4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.698198] env[62684]: DEBUG nova.compute.utils [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1880.707172] env[62684]: DEBUG nova.compute.manager [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1880.707570] env[62684]: DEBUG nova.network.neutron [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1880.736025] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1880.736025] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1880.736025] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Deleting the datastore file [datastore1] 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1880.736025] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f388d9c3-69c2-4f90-a225-7596db0a600b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.744018] env[62684]: DEBUG oslo_vmware.api [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Waiting for the task: (returnval){ [ 1880.744018] env[62684]: value = "task-2052753" [ 1880.744018] env[62684]: _type = "Task" [ 1880.744018] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.752325] env[62684]: DEBUG oslo_vmware.api [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052753, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.774093] env[62684]: DEBUG nova.policy [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6e8b54745b53458eafe4d911d7d6d7d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c54f74085f343d2b790145b0d82a9f8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1880.779782] env[62684]: DEBUG oslo_vmware.api [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Task: {'id': task-2052751, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.901891] env[62684]: DEBUG oslo_concurrency.lockutils [req-ffe108b5-4b1f-4c0c-aa43-8dcc7552b2ac req-8a33beb8-285c-4897-bf9f-4bc12842cfff service nova] Releasing lock "refresh_cache-025dfe36-1f14-4bda-84a0-d424364b745b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1881.024047] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052749, 'name': CreateVM_Task, 'duration_secs': 0.822581} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.024047] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1881.024242] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1881.024374] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1881.024701] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1881.024968] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0eda8ea3-97b8-49dc-869d-7f6c59cd1684 {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.035360] env[62684]: DEBUG oslo_vmware.api [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Waiting for the task: (returnval){ [ 1881.035360] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520ebc32-1fd7-05ba-4298-2568fc2bca25" [ 1881.035360] env[62684]: _type = "Task" [ 1881.035360] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.042251] env[62684]: DEBUG oslo_vmware.api [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520ebc32-1fd7-05ba-4298-2568fc2bca25, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.077790] env[62684]: DEBUG nova.compute.manager [req-5f03bf68-c73d-4387-a1ae-89081618c262 req-64ac5ff8-3ce9-4726-953a-98431f89f942 service nova] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Received event network-vif-deleted-04643826-a651-4eba-be4f-57825aa4f302 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1881.078016] env[62684]: INFO nova.compute.manager [req-5f03bf68-c73d-4387-a1ae-89081618c262 req-64ac5ff8-3ce9-4726-953a-98431f89f942 service nova] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Neutron deleted interface 04643826-a651-4eba-be4f-57825aa4f302; detaching it from the instance and deleting it from the info cache [ 1881.078219] env[62684]: DEBUG nova.network.neutron [req-5f03bf68-c73d-4387-a1ae-89081618c262 req-64ac5ff8-3ce9-4726-953a-98431f89f942 service nova] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1881.148350] env[62684]: DEBUG nova.network.neutron [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Successfully created port: 5d09e22d-005f-49a7-8c55-7d69dfd47687 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1881.206822] env[62684]: DEBUG nova.compute.manager [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1881.261953] env[62684]: DEBUG oslo_vmware.api [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Task: {'id': task-2052753, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077897} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.265573] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1881.265573] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1881.265573] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1881.265573] env[62684]: INFO nova.compute.manager [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Took 2.38 seconds to destroy the instance on the hypervisor. [ 1881.265573] env[62684]: DEBUG oslo.service.loopingcall [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1881.265839] env[62684]: DEBUG nova.compute.manager [-] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1881.265839] env[62684]: DEBUG nova.network.neutron [-] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1881.282195] env[62684]: DEBUG oslo_vmware.api [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Task: {'id': task-2052751, 'name': ExtendVirtualDisk_Task} progress is 50%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.285472] env[62684]: DEBUG nova.network.neutron [None req-f919698b-4cf5-4e1f-96bd-805d6b568dbd tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1881.483380] env[62684]: DEBUG nova.network.neutron [-] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1881.545887] env[62684]: DEBUG oslo_vmware.api [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520ebc32-1fd7-05ba-4298-2568fc2bca25, 'name': SearchDatastore_Task, 'duration_secs': 0.010357} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.548742] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1881.549183] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1881.549627] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1881.549935] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1881.550276] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1881.550961] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b26a7f9-b299-4902-8b92-e10ff609009b {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.563869] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1881.563869] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1881.563869] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ba0b517-4ffe-45ab-a307-bca2d2969a96 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.570570] env[62684]: DEBUG oslo_vmware.api [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Waiting for the task: (returnval){ [ 1881.570570] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526b4cb6-afe9-a5be-e50c-cf989ba2c496" [ 1881.570570] env[62684]: _type = "Task" [ 1881.570570] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.583109] env[62684]: DEBUG oslo_vmware.api [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526b4cb6-afe9-a5be-e50c-cf989ba2c496, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.585755] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e959fd9a-7aee-4e7f-86ea-63937b164df5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.594138] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6042ca65-d99d-413a-8158-111a5b86258d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.630350] env[62684]: DEBUG nova.compute.manager [req-5f03bf68-c73d-4387-a1ae-89081618c262 req-64ac5ff8-3ce9-4726-953a-98431f89f942 service nova] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Detach interface failed, port_id=04643826-a651-4eba-be4f-57825aa4f302, reason: Instance f44b2e88-af6d-4252-b562-9d5fa7745b56 could not be found. 
{{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1881.645222] env[62684]: DEBUG nova.compute.manager [req-96e07437-25b0-4686-9cdb-8d9290ad3b49 req-76e30ebf-1c15-4782-9a84-c45a34e70154 service nova] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Received event network-changed-de8de653-ec88-4a72-840c-27978f584581 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1881.645343] env[62684]: DEBUG nova.compute.manager [req-96e07437-25b0-4686-9cdb-8d9290ad3b49 req-76e30ebf-1c15-4782-9a84-c45a34e70154 service nova] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Refreshing instance network info cache due to event network-changed-de8de653-ec88-4a72-840c-27978f584581. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1881.645463] env[62684]: DEBUG oslo_concurrency.lockutils [req-96e07437-25b0-4686-9cdb-8d9290ad3b49 req-76e30ebf-1c15-4782-9a84-c45a34e70154 service nova] Acquiring lock "refresh_cache-0dbd52ac-c987-4728-974e-73e99465c5e7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1881.790502] env[62684]: DEBUG oslo_vmware.api [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Task: {'id': task-2052751, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.021723} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.791904] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1881.797021] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc8f3fae-422c-4557-8d30-20c960ca9e49 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.822858] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] b945f05d-ef1c-4469-9390-f7bbd4f435f0/b945f05d-ef1c-4469-9390-f7bbd4f435f0.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1881.826554] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe0a7e1b-f2ed-400b-a014-a50bf5b0a67f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.855272] env[62684]: DEBUG oslo_vmware.api [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Waiting for the task: (returnval){ [ 1881.855272] env[62684]: value = "task-2052754" [ 1881.855272] env[62684]: _type = "Task" [ 1881.855272] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.865215] env[62684]: DEBUG oslo_vmware.api [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Task: {'id': task-2052754, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.865995] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6424104-0ca7-4500-98fc-ab46183bd362 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.873824] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8e1294c-0d78-43c2-96c4-457ced6d73a7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.912516] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b14bb8f-3a9b-4916-a942-39b42f545c1c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.920926] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85f7beb0-aeba-452b-a7e1-175a2a336cfb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.937647] env[62684]: DEBUG nova.compute.provider_tree [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1881.984062] env[62684]: INFO nova.compute.manager [-] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Took 1.85 seconds to deallocate network for instance. [ 1882.081625] env[62684]: DEBUG oslo_vmware.api [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526b4cb6-afe9-a5be-e50c-cf989ba2c496, 'name': SearchDatastore_Task, 'duration_secs': 0.009254} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.082472] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-add50d7f-9707-4425-be2b-d713c95d8b0d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.088622] env[62684]: DEBUG oslo_vmware.api [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Waiting for the task: (returnval){ [ 1882.088622] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526511bd-432c-4f54-b025-c6e8d02a9079" [ 1882.088622] env[62684]: _type = "Task" [ 1882.088622] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.096900] env[62684]: DEBUG oslo_vmware.api [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526511bd-432c-4f54-b025-c6e8d02a9079, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.225463] env[62684]: DEBUG nova.compute.manager [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1882.260895] env[62684]: DEBUG nova.virt.hardware [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1882.261375] env[62684]: DEBUG nova.virt.hardware [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1882.262120] env[62684]: DEBUG nova.virt.hardware [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1882.262476] env[62684]: DEBUG nova.virt.hardware [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 
tempest-ServersTestJSON-828328252-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1882.262808] env[62684]: DEBUG nova.virt.hardware [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1882.263090] env[62684]: DEBUG nova.virt.hardware [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1882.263423] env[62684]: DEBUG nova.virt.hardware [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1882.263725] env[62684]: DEBUG nova.virt.hardware [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1882.264147] env[62684]: DEBUG nova.virt.hardware [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1882.264564] env[62684]: DEBUG nova.virt.hardware [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1882.264874] env[62684]: DEBUG nova.virt.hardware [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1882.265879] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36d44be6-352a-4787-8e68-aff630672ceb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.276542] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a53a3e5-57ff-4715-a6b7-31d89ac1704a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.349396] env[62684]: DEBUG nova.network.neutron [-] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1882.370224] env[62684]: DEBUG oslo_vmware.api [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Task: {'id': task-2052754, 'name': 
ReconfigVM_Task, 'duration_secs': 0.291043} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.370224] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Reconfigured VM instance instance-0000002a to attach disk [datastore1] b945f05d-ef1c-4469-9390-f7bbd4f435f0/b945f05d-ef1c-4469-9390-f7bbd4f435f0.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1882.370851] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2bf700d9-0065-4c1c-bb9a-d5ec44146fdb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.382272] env[62684]: DEBUG oslo_vmware.api [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Waiting for the task: (returnval){ [ 1882.382272] env[62684]: value = "task-2052755" [ 1882.382272] env[62684]: _type = "Task" [ 1882.382272] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.395984] env[62684]: DEBUG oslo_vmware.api [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Task: {'id': task-2052755, 'name': Rename_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.484893] env[62684]: DEBUG nova.scheduler.client.report [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 68 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1882.485215] env[62684]: DEBUG nova.compute.provider_tree [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 68 to 69 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1882.485408] env[62684]: DEBUG nova.compute.provider_tree [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1882.492745] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1882.604109] env[62684]: DEBUG oslo_vmware.api [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526511bd-432c-4f54-b025-c6e8d02a9079, 'name': SearchDatastore_Task, 'duration_secs': 0.012608} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.604109] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1882.604109] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 50bc9674-d19c-40f1-a89f-1738a1e48307/50bc9674-d19c-40f1-a89f-1738a1e48307.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1882.604109] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-35c52b07-27b1-4fd2-a236-6cdbd689ec22 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.610315] env[62684]: DEBUG oslo_vmware.api [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Waiting for the task: (returnval){ [ 1882.610315] env[62684]: value = "task-2052756" [ 1882.610315] env[62684]: _type = "Task" [ 1882.610315] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.622783] env[62684]: DEBUG oslo_vmware.api [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Task: {'id': task-2052756, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.697272] env[62684]: DEBUG nova.network.neutron [None req-f919698b-4cf5-4e1f-96bd-805d6b568dbd tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Updating instance_info_cache with network_info: [{"id": "de8de653-ec88-4a72-840c-27978f584581", "address": "fa:16:3e:49:29:8e", "network": {"id": "b71fbfa9-df50-40cb-95c3-272b6a724bc9", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-984806882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12b5d28eab2e49989d1e2f1a7e523eff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde8de653-ec", "ovs_interfaceid": "de8de653-ec88-4a72-840c-27978f584581", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1882.859204] env[62684]: INFO nova.compute.manager [-] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Took 1.60 seconds to deallocate network for instance. [ 1882.862352] env[62684]: DEBUG nova.network.neutron [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Successfully updated port: 5d09e22d-005f-49a7-8c55-7d69dfd47687 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1882.900366] env[62684]: DEBUG oslo_vmware.api [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Task: {'id': task-2052755, 'name': Rename_Task, 'duration_secs': 0.142197} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.900366] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1882.900366] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2ddf983a-f65e-4fe5-97e9-387f11ca835c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.906704] env[62684]: DEBUG oslo_vmware.api [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Waiting for the task: (returnval){ [ 1882.906704] env[62684]: value = "task-2052757" [ 1882.906704] env[62684]: _type = "Task" [ 1882.906704] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.916167] env[62684]: DEBUG oslo_vmware.api [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Task: {'id': task-2052757, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.993286] env[62684]: DEBUG oslo_concurrency.lockutils [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.808s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1882.994103] env[62684]: DEBUG nova.compute.manager [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1882.997267] env[62684]: DEBUG oslo_concurrency.lockutils [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.048s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1882.997515] env[62684]: DEBUG nova.objects.instance [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Lazy-loading 'resources' on Instance uuid fb7f38a0-bcfa-4d96-bde3-20d6f1d70112 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1883.121588] env[62684]: DEBUG oslo_vmware.api [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Task: {'id': task-2052756, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49283} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.121872] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 50bc9674-d19c-40f1-a89f-1738a1e48307/50bc9674-d19c-40f1-a89f-1738a1e48307.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1883.122217] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1883.122672] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-321f6fba-25d2-4779-a1da-2ba75e595197 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.128428] env[62684]: DEBUG oslo_vmware.api [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Waiting for the task: (returnval){ [ 1883.128428] env[62684]: value = "task-2052758" [ 1883.128428] env[62684]: _type = "Task" [ 1883.128428] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.136682] env[62684]: DEBUG oslo_vmware.api [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Task: {'id': task-2052758, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.201790] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f919698b-4cf5-4e1f-96bd-805d6b568dbd tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Releasing lock "refresh_cache-0dbd52ac-c987-4728-974e-73e99465c5e7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1883.202029] env[62684]: DEBUG nova.compute.manager [None req-f919698b-4cf5-4e1f-96bd-805d6b568dbd tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Inject network info {{(pid=62684) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1883.202332] env[62684]: DEBUG nova.compute.manager [None req-f919698b-4cf5-4e1f-96bd-805d6b568dbd tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] network_info to inject: |[{"id": "de8de653-ec88-4a72-840c-27978f584581", "address": "fa:16:3e:49:29:8e", "network": {"id": "b71fbfa9-df50-40cb-95c3-272b6a724bc9", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-984806882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12b5d28eab2e49989d1e2f1a7e523eff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde8de653-ec", "ovs_interfaceid": "de8de653-ec88-4a72-840c-27978f584581", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7221}} [ 1883.210303] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f919698b-4cf5-4e1f-96bd-805d6b568dbd tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Reconfiguring VM instance to set the machine id {{(pid=62684) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1883.210753] env[62684]: DEBUG oslo_concurrency.lockutils [req-96e07437-25b0-4686-9cdb-8d9290ad3b49 req-76e30ebf-1c15-4782-9a84-c45a34e70154 service nova] Acquired lock "refresh_cache-0dbd52ac-c987-4728-974e-73e99465c5e7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1883.210976] env[62684]: DEBUG nova.network.neutron [req-96e07437-25b0-4686-9cdb-8d9290ad3b49 req-76e30ebf-1c15-4782-9a84-c45a34e70154 service nova] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Refreshing network info cache for port 
de8de653-ec88-4a72-840c-27978f584581 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1883.212156] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-20af6ac4-9389-41fe-915c-4efa807f3843 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.223684] env[62684]: DEBUG nova.compute.manager [req-43a7dc77-2394-4aa0-bd6e-6154867d3d4f req-cc35856b-348b-471d-955a-cb457db064cc service nova] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Received event network-vif-deleted-c0047526-de96-4c14-8230-e69c53c790af {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1883.224322] env[62684]: DEBUG nova.compute.manager [req-43a7dc77-2394-4aa0-bd6e-6154867d3d4f req-cc35856b-348b-471d-955a-cb457db064cc service nova] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Received event network-vif-plugged-5d09e22d-005f-49a7-8c55-7d69dfd47687 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1883.224322] env[62684]: DEBUG oslo_concurrency.lockutils [req-43a7dc77-2394-4aa0-bd6e-6154867d3d4f req-cc35856b-348b-471d-955a-cb457db064cc service nova] Acquiring lock "ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.224322] env[62684]: DEBUG oslo_concurrency.lockutils [req-43a7dc77-2394-4aa0-bd6e-6154867d3d4f req-cc35856b-348b-471d-955a-cb457db064cc service nova] Lock "ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.224499] env[62684]: DEBUG oslo_concurrency.lockutils [req-43a7dc77-2394-4aa0-bd6e-6154867d3d4f req-cc35856b-348b-471d-955a-cb457db064cc service nova] Lock "ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1883.224634] env[62684]: DEBUG nova.compute.manager [req-43a7dc77-2394-4aa0-bd6e-6154867d3d4f req-cc35856b-348b-471d-955a-cb457db064cc service nova] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] No waiting events found dispatching network-vif-plugged-5d09e22d-005f-49a7-8c55-7d69dfd47687 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1883.225093] env[62684]: WARNING nova.compute.manager [req-43a7dc77-2394-4aa0-bd6e-6154867d3d4f req-cc35856b-348b-471d-955a-cb457db064cc service nova] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Received unexpected event network-vif-plugged-5d09e22d-005f-49a7-8c55-7d69dfd47687 for instance with vm_state building and task_state spawning. 
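The lock lines above ("Acquiring lock ... by ...", "acquired ... :: waited 0.000s", "released ... :: held 0.000s") are the oslo_concurrency.lockutils accounting around critical sections such as the per-instance "-events" lock and the resource tracker's "compute_resources" lock. A minimal sketch of the calling pattern, assuming the synchronized-decorator form; the function name and body below are illustrative stand-ins, and the waited/held wording itself is emitted by lockutils internals, not by this code:

    # Illustrative only: shows how a Nova-style critical section is declared.
    # "claim_resources" and its body are hypothetical; the acquire/wait/hold
    # log lines seen above come from oslo_concurrency.lockutils itself.
    from oslo_concurrency import lockutils

    @lockutils.synchronized("compute_resources")
    def claim_resources(instance_uuid):
        # Work done here is serialized against other holders of the same
        # in-process lock name, which is what produces the "waited N s" /
        # "held N s" accounting in the surrounding entries.
        return "claimed for %s" % instance_uuid

    claim_resources("b1f70e39-bf37-4fb8-b95b-653b59bec265")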
[ 1883.225093] env[62684]: DEBUG nova.compute.manager [req-43a7dc77-2394-4aa0-bd6e-6154867d3d4f req-cc35856b-348b-471d-955a-cb457db064cc service nova] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Received event network-changed-5d09e22d-005f-49a7-8c55-7d69dfd47687 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1883.225213] env[62684]: DEBUG nova.compute.manager [req-43a7dc77-2394-4aa0-bd6e-6154867d3d4f req-cc35856b-348b-471d-955a-cb457db064cc service nova] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Refreshing instance network info cache due to event network-changed-5d09e22d-005f-49a7-8c55-7d69dfd47687. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1883.225445] env[62684]: DEBUG oslo_concurrency.lockutils [req-43a7dc77-2394-4aa0-bd6e-6154867d3d4f req-cc35856b-348b-471d-955a-cb457db064cc service nova] Acquiring lock "refresh_cache-ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1883.225445] env[62684]: DEBUG oslo_concurrency.lockutils [req-43a7dc77-2394-4aa0-bd6e-6154867d3d4f req-cc35856b-348b-471d-955a-cb457db064cc service nova] Acquired lock "refresh_cache-ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1883.225592] env[62684]: DEBUG nova.network.neutron [req-43a7dc77-2394-4aa0-bd6e-6154867d3d4f req-cc35856b-348b-471d-955a-cb457db064cc service nova] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Refreshing network info cache for port 5d09e22d-005f-49a7-8c55-7d69dfd47687 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1883.235060] env[62684]: DEBUG oslo_vmware.api [None req-f919698b-4cf5-4e1f-96bd-805d6b568dbd tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Waiting for the task: (returnval){ [ 1883.235060] env[62684]: value = "task-2052759" [ 1883.235060] env[62684]: _type = "Task" [ 1883.235060] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.243329] env[62684]: DEBUG oslo_vmware.api [None req-f919698b-4cf5-4e1f-96bd-805d6b568dbd tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Task: {'id': task-2052759, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.367998] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "refresh_cache-ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1883.417257] env[62684]: DEBUG oslo_vmware.api [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Task: {'id': task-2052757, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.420425] env[62684]: INFO nova.compute.manager [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Took 0.56 seconds to detach 1 volumes for instance. [ 1883.423105] env[62684]: DEBUG nova.compute.manager [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Deleting volume: 34523d13-ed90-416e-a19a-57c837136d21 {{(pid=62684) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 1883.510287] env[62684]: DEBUG nova.compute.utils [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1883.512524] env[62684]: DEBUG nova.compute.manager [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1883.512694] env[62684]: DEBUG nova.network.neutron [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1883.566928] env[62684]: DEBUG nova.policy [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '15ae7a383f294208909e3763b5429340', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bd812751722143fabedfa986a2d98b59', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1883.572543] env[62684]: DEBUG nova.objects.instance [None req-56241456-c082-4fa4-8424-e63af1f7d9ed tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Lazy-loading 'flavor' on Instance uuid 0dbd52ac-c987-4728-974e-73e99465c5e7 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1883.642427] env[62684]: DEBUG oslo_vmware.api [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Task: {'id': task-2052758, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067538} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.642427] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1883.643454] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b246996-dca2-428a-8fdf-3d623a9e8294 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.670873] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] 50bc9674-d19c-40f1-a89f-1738a1e48307/50bc9674-d19c-40f1-a89f-1738a1e48307.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1883.674635] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f75385fb-a1e4-441a-a05f-53ad75ceef95 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.695768] env[62684]: DEBUG oslo_vmware.api [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Waiting for the task: (returnval){ [ 1883.695768] env[62684]: value = "task-2052761" [ 1883.695768] env[62684]: _type = "Task" [ 1883.695768] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.708407] env[62684]: DEBUG oslo_vmware.api [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Task: {'id': task-2052761, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.746015] env[62684]: DEBUG oslo_vmware.api [None req-f919698b-4cf5-4e1f-96bd-805d6b568dbd tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Task: {'id': task-2052759, 'name': ReconfigVM_Task, 'duration_secs': 0.157163} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.746077] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f919698b-4cf5-4e1f-96bd-805d6b568dbd tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Reconfigured VM instance to set the machine id {{(pid=62684) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1883.778908] env[62684]: DEBUG nova.network.neutron [req-43a7dc77-2394-4aa0-bd6e-6154867d3d4f req-cc35856b-348b-471d-955a-cb457db064cc service nova] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1883.920728] env[62684]: DEBUG oslo_vmware.api [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Task: {'id': task-2052757, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.924363] env[62684]: DEBUG nova.network.neutron [req-43a7dc77-2394-4aa0-bd6e-6154867d3d4f req-cc35856b-348b-471d-955a-cb457db064cc service nova] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1883.969673] env[62684]: DEBUG oslo_concurrency.lockutils [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.984811] env[62684]: DEBUG nova.network.neutron [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Successfully created port: 35a0f9ef-b68c-43df-8887-6c35257bbc58 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1884.013622] env[62684]: DEBUG nova.compute.manager [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1884.083200] env[62684]: DEBUG oslo_concurrency.lockutils [None req-56241456-c082-4fa4-8424-e63af1f7d9ed tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Acquiring lock "refresh_cache-0dbd52ac-c987-4728-974e-73e99465c5e7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1884.160500] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e5646f0-1b0d-45fc-b746-46ceda0ff5dc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.169266] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01f5b7dd-f982-4b61-b7b8-3193de628c4b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.203386] env[62684]: DEBUG nova.network.neutron [req-96e07437-25b0-4686-9cdb-8d9290ad3b49 req-76e30ebf-1c15-4782-9a84-c45a34e70154 service nova] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Updated VIF entry in instance network info cache for port de8de653-ec88-4a72-840c-27978f584581. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1884.203875] env[62684]: DEBUG nova.network.neutron [req-96e07437-25b0-4686-9cdb-8d9290ad3b49 req-76e30ebf-1c15-4782-9a84-c45a34e70154 service nova] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Updating instance_info_cache with network_info: [{"id": "de8de653-ec88-4a72-840c-27978f584581", "address": "fa:16:3e:49:29:8e", "network": {"id": "b71fbfa9-df50-40cb-95c3-272b6a724bc9", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-984806882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12b5d28eab2e49989d1e2f1a7e523eff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde8de653-ec", "ovs_interfaceid": "de8de653-ec88-4a72-840c-27978f584581", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1884.208966] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88e64789-32a3-4a7f-8f4f-516c8348691f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.220081] env[62684]: DEBUG oslo_vmware.api [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Task: {'id': task-2052761, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.225365] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff7e706-ddbc-43e9-8e41-99a04c1e4b64 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.242081] env[62684]: DEBUG nova.compute.provider_tree [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1884.418398] env[62684]: DEBUG oslo_vmware.api [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Task: {'id': task-2052757, 'name': PowerOnVM_Task, 'duration_secs': 1.461176} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.418744] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1884.418895] env[62684]: INFO nova.compute.manager [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Took 10.85 seconds to spawn the instance on the hypervisor. [ 1884.419096] env[62684]: DEBUG nova.compute.manager [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1884.419910] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0620385b-daed-4a13-b046-2ef8372f5d09 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.433026] env[62684]: DEBUG oslo_concurrency.lockutils [req-43a7dc77-2394-4aa0-bd6e-6154867d3d4f req-cc35856b-348b-471d-955a-cb457db064cc service nova] Releasing lock "refresh_cache-ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1884.433026] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquired lock "refresh_cache-ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1884.433026] env[62684]: DEBUG nova.network.neutron [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1884.715031] env[62684]: DEBUG oslo_concurrency.lockutils [req-96e07437-25b0-4686-9cdb-8d9290ad3b49 req-76e30ebf-1c15-4782-9a84-c45a34e70154 service nova] Releasing lock "refresh_cache-0dbd52ac-c987-4728-974e-73e99465c5e7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1884.715311] env[62684]: DEBUG oslo_vmware.api [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Task: {'id': task-2052761, 'name': ReconfigVM_Task, 'duration_secs': 0.71411} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.715547] env[62684]: DEBUG oslo_concurrency.lockutils [None req-56241456-c082-4fa4-8424-e63af1f7d9ed tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Acquired lock "refresh_cache-0dbd52ac-c987-4728-974e-73e99465c5e7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1884.717330] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Reconfigured VM instance instance-0000002b to attach disk [datastore1] 50bc9674-d19c-40f1-a89f-1738a1e48307/50bc9674-d19c-40f1-a89f-1738a1e48307.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1884.717951] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cd50520c-0871-41e9-97fc-05ff7af5f77a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.725083] env[62684]: DEBUG oslo_vmware.api [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Waiting for the task: (returnval){ [ 1884.725083] env[62684]: value = "task-2052762" [ 1884.725083] env[62684]: _type = "Task" [ 1884.725083] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.732916] env[62684]: DEBUG oslo_vmware.api [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Task: {'id': task-2052762, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.745156] env[62684]: DEBUG nova.scheduler.client.report [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1884.943478] env[62684]: INFO nova.compute.manager [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Took 45.09 seconds to build instance. [ 1884.986867] env[62684]: DEBUG nova.network.neutron [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1885.031213] env[62684]: DEBUG nova.compute.manager [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1885.055232] env[62684]: DEBUG oslo_concurrency.lockutils [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Acquiring lock "a3c7943e-7528-41bc-9a20-1e2b57f832e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1885.055232] env[62684]: DEBUG oslo_concurrency.lockutils [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Lock "a3c7943e-7528-41bc-9a20-1e2b57f832e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1885.068369] env[62684]: DEBUG nova.virt.hardware [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=<?>,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-01-10T07:25:27Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1885.068677] env[62684]: DEBUG nova.virt.hardware [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1885.068853] env[62684]: DEBUG nova.virt.hardware [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1885.069058] env[62684]: DEBUG nova.virt.hardware [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1885.069217] env[62684]: DEBUG nova.virt.hardware [None 
req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1885.069373] env[62684]: DEBUG nova.virt.hardware [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1885.069587] env[62684]: DEBUG nova.virt.hardware [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1885.069781] env[62684]: DEBUG nova.virt.hardware [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1885.069981] env[62684]: DEBUG nova.virt.hardware [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1885.070509] env[62684]: DEBUG nova.virt.hardware [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1885.070738] env[62684]: DEBUG nova.virt.hardware [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1885.072485] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cfbefc1-9b79-469c-a83b-c92bb75430df {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.081995] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7dbc8e6-c0d3-4aeb-a5ef-01cd02586323 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.175709] env[62684]: DEBUG nova.network.neutron [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Updating instance_info_cache with network_info: [{"id": "5d09e22d-005f-49a7-8c55-7d69dfd47687", "address": "fa:16:3e:fe:35:e7", "network": {"id": "aa52badb-0b73-48bc-afaa-5e06a97d5c7d", "bridge": "br-int", "label": "tempest-ServersTestJSON-556342067-network", "subnets": [{"cidr": "192.168.128.0/28", 
"dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c54f74085f343d2b790145b0d82a9f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d09e22d-00", "ovs_interfaceid": "5d09e22d-005f-49a7-8c55-7d69dfd47687", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1885.235412] env[62684]: DEBUG oslo_vmware.api [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Task: {'id': task-2052762, 'name': Rename_Task, 'duration_secs': 0.30556} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.236897] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1885.237213] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1e6d685e-a13f-4ce5-96c9-fefff649b4bc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.243304] env[62684]: DEBUG oslo_vmware.api [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Waiting for the task: (returnval){ [ 1885.243304] env[62684]: value = "task-2052763" [ 1885.243304] env[62684]: _type = "Task" [ 1885.243304] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.251938] env[62684]: DEBUG oslo_concurrency.lockutils [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.255s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1885.253968] env[62684]: DEBUG oslo_vmware.api [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Task: {'id': task-2052763, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.254498] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.205s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1885.254772] env[62684]: DEBUG nova.objects.instance [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Lazy-loading 'resources' on Instance uuid d06f3099-d05f-417f-a71a-7b368590624f {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1885.284540] env[62684]: INFO nova.scheduler.client.report [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Deleted allocations for instance fb7f38a0-bcfa-4d96-bde3-20d6f1d70112 [ 1885.326340] env[62684]: DEBUG nova.network.neutron [None req-56241456-c082-4fa4-8424-e63af1f7d9ed tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1885.349704] env[62684]: DEBUG nova.compute.manager [req-8e8ad003-5d75-4090-baa3-8642b724dd80 req-d3716c13-8ba7-4ba0-87d3-651c29935dcc service nova] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Received event network-changed-de8de653-ec88-4a72-840c-27978f584581 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1885.349876] env[62684]: DEBUG nova.compute.manager [req-8e8ad003-5d75-4090-baa3-8642b724dd80 req-d3716c13-8ba7-4ba0-87d3-651c29935dcc service nova] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Refreshing instance network info cache due to event network-changed-de8de653-ec88-4a72-840c-27978f584581. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1885.350095] env[62684]: DEBUG oslo_concurrency.lockutils [req-8e8ad003-5d75-4090-baa3-8642b724dd80 req-d3716c13-8ba7-4ba0-87d3-651c29935dcc service nova] Acquiring lock "refresh_cache-0dbd52ac-c987-4728-974e-73e99465c5e7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1885.446681] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4843141e-a13b-4e77-9675-efc8e4bdf6ce tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Lock "b945f05d-ef1c-4469-9390-f7bbd4f435f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 93.517s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1885.561267] env[62684]: DEBUG nova.network.neutron [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Successfully updated port: 35a0f9ef-b68c-43df-8887-6c35257bbc58 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1885.571493] env[62684]: DEBUG nova.compute.manager [req-9a533ec7-a3d1-4502-9132-a1431da70871 req-d0304895-5695-4545-a912-c90e908f8c12 service nova] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Received event network-vif-plugged-35a0f9ef-b68c-43df-8887-6c35257bbc58 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1885.571954] env[62684]: DEBUG oslo_concurrency.lockutils [req-9a533ec7-a3d1-4502-9132-a1431da70871 req-d0304895-5695-4545-a912-c90e908f8c12 service nova] Acquiring lock "b1f70e39-bf37-4fb8-b95b-653b59bec265-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1885.572042] env[62684]: DEBUG oslo_concurrency.lockutils [req-9a533ec7-a3d1-4502-9132-a1431da70871 req-d0304895-5695-4545-a912-c90e908f8c12 service nova] Lock "b1f70e39-bf37-4fb8-b95b-653b59bec265-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1885.572176] env[62684]: DEBUG oslo_concurrency.lockutils [req-9a533ec7-a3d1-4502-9132-a1431da70871 req-d0304895-5695-4545-a912-c90e908f8c12 service nova] Lock "b1f70e39-bf37-4fb8-b95b-653b59bec265-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1885.572369] env[62684]: DEBUG nova.compute.manager [req-9a533ec7-a3d1-4502-9132-a1431da70871 req-d0304895-5695-4545-a912-c90e908f8c12 service nova] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] No waiting events found dispatching network-vif-plugged-35a0f9ef-b68c-43df-8887-6c35257bbc58 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1885.572583] env[62684]: WARNING nova.compute.manager [req-9a533ec7-a3d1-4502-9132-a1431da70871 req-d0304895-5695-4545-a912-c90e908f8c12 service nova] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Received unexpected event 
network-vif-plugged-35a0f9ef-b68c-43df-8887-6c35257bbc58 for instance with vm_state building and task_state spawning. [ 1885.678473] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Releasing lock "refresh_cache-ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1885.679093] env[62684]: DEBUG nova.compute.manager [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Instance network_info: |[{"id": "5d09e22d-005f-49a7-8c55-7d69dfd47687", "address": "fa:16:3e:fe:35:e7", "network": {"id": "aa52badb-0b73-48bc-afaa-5e06a97d5c7d", "bridge": "br-int", "label": "tempest-ServersTestJSON-556342067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c54f74085f343d2b790145b0d82a9f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d09e22d-00", "ovs_interfaceid": "5d09e22d-005f-49a7-8c55-7d69dfd47687", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1885.679363] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:35:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1fb81f98-6f5a-47ab-a512-27277591d064', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5d09e22d-005f-49a7-8c55-7d69dfd47687', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1885.687315] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Creating folder: Project (6c54f74085f343d2b790145b0d82a9f8). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1885.687658] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-53590dd4-5f2a-42a0-9c21-37ab522bfea6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.700221] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Created folder: Project (6c54f74085f343d2b790145b0d82a9f8) in parent group-v421118. 
[ 1885.700433] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Creating folder: Instances. Parent ref: group-v421250. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1885.700718] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0c5a6aed-a81e-4a8e-8823-84de79d47eae {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.710690] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Created folder: Instances in parent group-v421250. [ 1885.711101] env[62684]: DEBUG oslo.service.loopingcall [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1885.711189] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1885.711629] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-49d80944-f881-459b-89e2-b5f0648798b2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.731978] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1885.731978] env[62684]: value = "task-2052766" [ 1885.731978] env[62684]: _type = "Task" [ 1885.731978] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.740212] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052766, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.753785] env[62684]: DEBUG oslo_vmware.api [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Task: {'id': task-2052763, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.799495] env[62684]: DEBUG oslo_concurrency.lockutils [None req-280129b5-0d5d-41eb-a325-508f42239ae5 tempest-ServersAdminNegativeTestJSON-1773615093 tempest-ServersAdminNegativeTestJSON-1773615093-project-member] Lock "fb7f38a0-bcfa-4d96-bde3-20d6f1d70112" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 40.795s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1885.801303] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "fb7f38a0-bcfa-4d96-bde3-20d6f1d70112" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 30.456s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1885.801755] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2c7717e0-bd59-4a4a-bd0a-1bcd9817f387 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.814198] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6edf88f5-e97d-43e4-ae22-a081f3b941bf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.952958] env[62684]: DEBUG nova.compute.manager [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1886.064089] env[62684]: DEBUG oslo_concurrency.lockutils [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "refresh_cache-b1f70e39-bf37-4fb8-b95b-653b59bec265" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1886.064420] env[62684]: DEBUG oslo_concurrency.lockutils [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquired lock "refresh_cache-b1f70e39-bf37-4fb8-b95b-653b59bec265" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1886.064676] env[62684]: DEBUG nova.network.neutron [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1886.214278] env[62684]: DEBUG nova.network.neutron [None req-56241456-c082-4fa4-8424-e63af1f7d9ed tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Updating instance_info_cache with network_info: [{"id": "de8de653-ec88-4a72-840c-27978f584581", "address": "fa:16:3e:49:29:8e", "network": {"id": "b71fbfa9-df50-40cb-95c3-272b6a724bc9", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-984806882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12b5d28eab2e49989d1e2f1a7e523eff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde8de653-ec", "ovs_interfaceid": "de8de653-ec88-4a72-840c-27978f584581", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1886.242623] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052766, 'name': CreateVM_Task, 'duration_secs': 0.361363} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.245065] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1886.246025] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1886.246272] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1886.246524] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1886.249561] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fc9af05-3b12-46fb-8a47-2ec8dc908a05 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.254281] env[62684]: DEBUG oslo_vmware.api [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 1886.254281] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5270c3c2-e3e3-c896-81e0-4f8f3e384ec4" [ 1886.254281] env[62684]: _type = "Task" [ 1886.254281] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.257514] env[62684]: DEBUG oslo_vmware.api [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Task: {'id': task-2052763, 'name': PowerOnVM_Task, 'duration_secs': 0.651993} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.263813] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1886.264073] env[62684]: INFO nova.compute.manager [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Took 10.06 seconds to spawn the instance on the hypervisor. 
[ 1886.264275] env[62684]: DEBUG nova.compute.manager [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1886.265443] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0101aab3-a87d-4d3b-a1b4-20e5f1728168 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.273699] env[62684]: DEBUG oslo_vmware.api [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5270c3c2-e3e3-c896-81e0-4f8f3e384ec4, 'name': SearchDatastore_Task, 'duration_secs': 0.012257} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.277217] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1886.277456] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1886.277690] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1886.277845] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1886.278062] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1886.281201] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-07486322-f681-4ab9-9c64-5550cbab2e95 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.289144] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Created directory with path [datastore1] 
devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1886.289393] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1886.290276] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74686d79-c831-4c2e-9a22-19e28cc79a6b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.299478] env[62684]: DEBUG oslo_vmware.api [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 1886.299478] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52daba9d-d8f6-3840-e461-63cd44af0d64" [ 1886.299478] env[62684]: _type = "Task" [ 1886.299478] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.306472] env[62684]: DEBUG oslo_vmware.api [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52daba9d-d8f6-3840-e461-63cd44af0d64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.355268] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b890445-3025-4f71-aa28-c65ba8a1b62a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.363898] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e34217dd-fec2-45e1-ae01-3e1dbe7029db {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.369954] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "fb7f38a0-bcfa-4d96-bde3-20d6f1d70112" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.569s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1886.370322] env[62684]: DEBUG oslo_concurrency.lockutils [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Acquiring lock "b945f05d-ef1c-4469-9390-f7bbd4f435f0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1886.370574] env[62684]: DEBUG oslo_concurrency.lockutils [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Lock "b945f05d-ef1c-4469-9390-f7bbd4f435f0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1886.370858] env[62684]: DEBUG oslo_concurrency.lockutils [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Acquiring lock "b945f05d-ef1c-4469-9390-f7bbd4f435f0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1886.371123] env[62684]: DEBUG oslo_concurrency.lockutils [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Lock "b945f05d-ef1c-4469-9390-f7bbd4f435f0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1886.371339] env[62684]: DEBUG oslo_concurrency.lockutils [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Lock "b945f05d-ef1c-4469-9390-f7bbd4f435f0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1886.397082] env[62684]: INFO nova.compute.manager [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Terminating instance [ 1886.399085] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61338e01-1fbf-49e8-8a06-d90da8bcfba4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.402066] env[62684]: DEBUG nova.compute.manager [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1886.402324] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1886.403093] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9214e6c1-7b8d-4d76-aad2-05a81c651d63 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.413721] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-009170d2-2816-413c-a5e6-25a8293d676f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.417425] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1886.417694] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-876bf056-87fa-49be-a787-f0f033191d94 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.429641] env[62684]: DEBUG nova.compute.provider_tree [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1886.432473] env[62684]: DEBUG oslo_vmware.api [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Waiting for the task: (returnval){ [ 1886.432473] env[62684]: value = "task-2052767" [ 1886.432473] env[62684]: _type = "Task" [ 1886.432473] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.439953] env[62684]: DEBUG oslo_vmware.api [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Task: {'id': task-2052767, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.472236] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1886.599873] env[62684]: DEBUG nova.network.neutron [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1886.717477] env[62684]: DEBUG oslo_concurrency.lockutils [None req-56241456-c082-4fa4-8424-e63af1f7d9ed tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Releasing lock "refresh_cache-0dbd52ac-c987-4728-974e-73e99465c5e7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1886.719067] env[62684]: DEBUG nova.compute.manager [None req-56241456-c082-4fa4-8424-e63af1f7d9ed tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Inject network info {{(pid=62684) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1886.719067] env[62684]: DEBUG nova.compute.manager [None req-56241456-c082-4fa4-8424-e63af1f7d9ed tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] network_info to inject: |[{"id": "de8de653-ec88-4a72-840c-27978f584581", "address": "fa:16:3e:49:29:8e", "network": {"id": "b71fbfa9-df50-40cb-95c3-272b6a724bc9", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-984806882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12b5d28eab2e49989d1e2f1a7e523eff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde8de653-ec", "ovs_interfaceid": "de8de653-ec88-4a72-840c-27978f584581", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7221}} [ 1886.722595] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-56241456-c082-4fa4-8424-e63af1f7d9ed tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 
0dbd52ac-c987-4728-974e-73e99465c5e7] Reconfiguring VM instance to set the machine id {{(pid=62684) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1886.722910] env[62684]: DEBUG oslo_concurrency.lockutils [req-8e8ad003-5d75-4090-baa3-8642b724dd80 req-d3716c13-8ba7-4ba0-87d3-651c29935dcc service nova] Acquired lock "refresh_cache-0dbd52ac-c987-4728-974e-73e99465c5e7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1886.723157] env[62684]: DEBUG nova.network.neutron [req-8e8ad003-5d75-4090-baa3-8642b724dd80 req-d3716c13-8ba7-4ba0-87d3-651c29935dcc service nova] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Refreshing network info cache for port de8de653-ec88-4a72-840c-27978f584581 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1886.724336] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57db6be4-b208-4708-8546-e818756b0f7c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.741209] env[62684]: DEBUG oslo_vmware.api [None req-56241456-c082-4fa4-8424-e63af1f7d9ed tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Waiting for the task: (returnval){ [ 1886.741209] env[62684]: value = "task-2052768" [ 1886.741209] env[62684]: _type = "Task" [ 1886.741209] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.745056] env[62684]: DEBUG nova.network.neutron [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Updating instance_info_cache with network_info: [{"id": "35a0f9ef-b68c-43df-8887-6c35257bbc58", "address": "fa:16:3e:7a:c4:6c", "network": {"id": "bd253713-4e81-4c94-9689-22b81e7f51b6", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-307001665-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd812751722143fabedfa986a2d98b59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35a0f9ef-b6", "ovs_interfaceid": "35a0f9ef-b68c-43df-8887-6c35257bbc58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1886.753324] env[62684]: DEBUG oslo_vmware.api [None req-56241456-c082-4fa4-8424-e63af1f7d9ed tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Task: {'id': task-2052768, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.790338] env[62684]: INFO nova.compute.manager [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Took 45.75 seconds to build instance. [ 1886.808455] env[62684]: DEBUG oslo_vmware.api [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52daba9d-d8f6-3840-e461-63cd44af0d64, 'name': SearchDatastore_Task, 'duration_secs': 0.010124} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.809812] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a36427c-4a89-4a94-9362-7bd4890e6868 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.815430] env[62684]: DEBUG oslo_vmware.api [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 1886.815430] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5272a965-a808-ad29-0ced-b54e4da2ef51" [ 1886.815430] env[62684]: _type = "Task" [ 1886.815430] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.823555] env[62684]: DEBUG oslo_vmware.api [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5272a965-a808-ad29-0ced-b54e4da2ef51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.932482] env[62684]: DEBUG nova.scheduler.client.report [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1886.949850] env[62684]: DEBUG oslo_vmware.api [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Task: {'id': task-2052767, 'name': PowerOffVM_Task, 'duration_secs': 0.253388} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.950170] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1886.950345] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1886.950611] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0d4c8743-a88d-4703-921d-b368f2a0afb9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.030024] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1887.030303] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1887.030491] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Deleting the datastore file [datastore1] b945f05d-ef1c-4469-9390-f7bbd4f435f0 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1887.030775] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-abcee47f-a9c5-4c47-b00e-ce5fb0e8b2d7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.036809] env[62684]: DEBUG oslo_vmware.api [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Waiting for the task: (returnval){ [ 1887.036809] env[62684]: value = "task-2052770" [ 1887.036809] env[62684]: _type = "Task" [ 1887.036809] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.044801] env[62684]: DEBUG oslo_vmware.api [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Task: {'id': task-2052770, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.250719] env[62684]: DEBUG oslo_concurrency.lockutils [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Releasing lock "refresh_cache-b1f70e39-bf37-4fb8-b95b-653b59bec265" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1887.254021] env[62684]: DEBUG nova.compute.manager [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Instance network_info: |[{"id": "35a0f9ef-b68c-43df-8887-6c35257bbc58", "address": "fa:16:3e:7a:c4:6c", "network": {"id": "bd253713-4e81-4c94-9689-22b81e7f51b6", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-307001665-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd812751722143fabedfa986a2d98b59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35a0f9ef-b6", "ovs_interfaceid": "35a0f9ef-b68c-43df-8887-6c35257bbc58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1887.254180] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:c4:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4055505f-97ab-400b-969c-43d99b38fd48', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '35a0f9ef-b68c-43df-8887-6c35257bbc58', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1887.259606] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Creating folder: Project (bd812751722143fabedfa986a2d98b59). Parent ref: group-v421118. 
{{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1887.261285] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0105aaed-c3fb-450a-82d3-d954ea2a046b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.266554] env[62684]: DEBUG oslo_vmware.api [None req-56241456-c082-4fa4-8424-e63af1f7d9ed tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Task: {'id': task-2052768, 'name': ReconfigVM_Task, 'duration_secs': 0.157662} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.266554] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-56241456-c082-4fa4-8424-e63af1f7d9ed tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Reconfigured VM instance to set the machine id {{(pid=62684) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1887.276323] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Created folder: Project (bd812751722143fabedfa986a2d98b59) in parent group-v421118. [ 1887.276323] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Creating folder: Instances. Parent ref: group-v421253. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1887.276323] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-54bdbc69-ab84-4a70-9381-38839d26db58 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.285318] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Created folder: Instances in parent group-v421253. [ 1887.285592] env[62684]: DEBUG oslo.service.loopingcall [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1887.287542] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1887.287542] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ffe1ff57-2dd6-4e8e-9e7d-00a3ce0c4938 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.301825] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d1c1a56f-c322-4647-8e4a-f9cb0792d3f6 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Lock "50bc9674-d19c-40f1-a89f-1738a1e48307" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.283s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1887.309321] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1887.309321] env[62684]: value = "task-2052773" [ 1887.309321] env[62684]: _type = "Task" [ 1887.309321] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.318149] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052773, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.327600] env[62684]: DEBUG oslo_vmware.api [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5272a965-a808-ad29-0ced-b54e4da2ef51, 'name': SearchDatastore_Task, 'duration_secs': 0.022569} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.327940] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1887.328164] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1/ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1887.328433] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-862e2272-3c6e-4100-bb69-db787c48c220 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.337052] env[62684]: DEBUG oslo_vmware.api [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 1887.337052] env[62684]: value = "task-2052774" [ 1887.337052] env[62684]: _type = "Task" [ 1887.337052] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.345465] env[62684]: DEBUG oslo_vmware.api [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052774, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.424802] env[62684]: DEBUG oslo_concurrency.lockutils [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Acquiring lock "0dbd52ac-c987-4728-974e-73e99465c5e7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1887.425120] env[62684]: DEBUG oslo_concurrency.lockutils [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Lock "0dbd52ac-c987-4728-974e-73e99465c5e7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1887.425349] env[62684]: DEBUG oslo_concurrency.lockutils [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Acquiring lock "0dbd52ac-c987-4728-974e-73e99465c5e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1887.425538] env[62684]: DEBUG oslo_concurrency.lockutils [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Lock "0dbd52ac-c987-4728-974e-73e99465c5e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1887.425773] env[62684]: DEBUG oslo_concurrency.lockutils [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Lock "0dbd52ac-c987-4728-974e-73e99465c5e7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1887.430044] env[62684]: INFO nova.compute.manager [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Terminating instance [ 1887.434818] env[62684]: DEBUG nova.compute.manager [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1887.435040] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1887.435933] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82122f59-37d0-49bf-b5eb-3413fd82556f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.439263] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.185s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1887.442671] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 34.545s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1887.442880] env[62684]: DEBUG nova.objects.instance [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62684) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1887.450381] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1887.450649] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-22db771d-fef8-4132-ab37-46d9364e78d1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.459476] env[62684]: DEBUG oslo_vmware.api [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Waiting for the task: (returnval){ [ 1887.459476] env[62684]: value = "task-2052775" [ 1887.459476] env[62684]: _type = "Task" [ 1887.459476] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.474226] env[62684]: DEBUG oslo_vmware.api [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Task: {'id': task-2052775, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.475390] env[62684]: INFO nova.scheduler.client.report [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Deleted allocations for instance d06f3099-d05f-417f-a71a-7b368590624f [ 1887.548583] env[62684]: DEBUG oslo_vmware.api [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Task: {'id': task-2052770, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1364} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.548892] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1887.549121] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1887.549315] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1887.549559] env[62684]: INFO nova.compute.manager [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1887.549861] env[62684]: DEBUG oslo.service.loopingcall [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1887.549957] env[62684]: DEBUG nova.compute.manager [-] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1887.550097] env[62684]: DEBUG nova.network.neutron [-] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1887.661955] env[62684]: DEBUG nova.network.neutron [req-8e8ad003-5d75-4090-baa3-8642b724dd80 req-d3716c13-8ba7-4ba0-87d3-651c29935dcc service nova] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Updated VIF entry in instance network info cache for port de8de653-ec88-4a72-840c-27978f584581. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1887.662584] env[62684]: DEBUG nova.network.neutron [req-8e8ad003-5d75-4090-baa3-8642b724dd80 req-d3716c13-8ba7-4ba0-87d3-651c29935dcc service nova] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Updating instance_info_cache with network_info: [{"id": "de8de653-ec88-4a72-840c-27978f584581", "address": "fa:16:3e:49:29:8e", "network": {"id": "b71fbfa9-df50-40cb-95c3-272b6a724bc9", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-984806882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12b5d28eab2e49989d1e2f1a7e523eff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde8de653-ec", "ovs_interfaceid": "de8de653-ec88-4a72-840c-27978f584581", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1887.690540] env[62684]: DEBUG nova.compute.manager [req-312d7aad-58b8-4f86-8f1b-9805f177e4ad req-abdc5742-0b6e-47c3-a913-d1d9f5e24b53 service nova] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Received event network-changed-35a0f9ef-b68c-43df-8887-6c35257bbc58 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1887.690540] env[62684]: DEBUG nova.compute.manager [req-312d7aad-58b8-4f86-8f1b-9805f177e4ad req-abdc5742-0b6e-47c3-a913-d1d9f5e24b53 service nova] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Refreshing instance network info cache due to event network-changed-35a0f9ef-b68c-43df-8887-6c35257bbc58. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1887.690770] env[62684]: DEBUG oslo_concurrency.lockutils [req-312d7aad-58b8-4f86-8f1b-9805f177e4ad req-abdc5742-0b6e-47c3-a913-d1d9f5e24b53 service nova] Acquiring lock "refresh_cache-b1f70e39-bf37-4fb8-b95b-653b59bec265" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1887.691072] env[62684]: DEBUG oslo_concurrency.lockutils [req-312d7aad-58b8-4f86-8f1b-9805f177e4ad req-abdc5742-0b6e-47c3-a913-d1d9f5e24b53 service nova] Acquired lock "refresh_cache-b1f70e39-bf37-4fb8-b95b-653b59bec265" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1887.691362] env[62684]: DEBUG nova.network.neutron [req-312d7aad-58b8-4f86-8f1b-9805f177e4ad req-abdc5742-0b6e-47c3-a913-d1d9f5e24b53 service nova] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Refreshing network info cache for port 35a0f9ef-b68c-43df-8887-6c35257bbc58 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1887.806310] env[62684]: DEBUG nova.compute.manager [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1887.826064] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052773, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.852378] env[62684]: DEBUG oslo_vmware.api [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052774, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.975131] env[62684]: DEBUG oslo_vmware.api [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Task: {'id': task-2052775, 'name': PowerOffVM_Task, 'duration_secs': 0.207143} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.975467] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1887.975653] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1887.976205] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-16b18fea-d91d-4e0e-8f17-67ce8193be15 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.984979] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83b12138-2343-421d-a96e-3bec366690c7 tempest-ServerExternalEventsTest-1559920294 tempest-ServerExternalEventsTest-1559920294-project-member] Lock "d06f3099-d05f-417f-a71a-7b368590624f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.161s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1887.987107] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "d06f3099-d05f-417f-a71a-7b368590624f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 32.634s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1887.989278] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a751163e-c796-4e6c-97c9-558392bfcaf0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.002288] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3824d176-64bb-4d7b-a58c-ecf2bcc33fd6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.050197] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1888.050559] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1888.050559] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Deleting the datastore file [datastore2] 
0dbd52ac-c987-4728-974e-73e99465c5e7 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1888.052258] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-19b610b5-239a-4421-8200-d3229b02585c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.059637] env[62684]: DEBUG oslo_vmware.api [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Waiting for the task: (returnval){ [ 1888.059637] env[62684]: value = "task-2052777" [ 1888.059637] env[62684]: _type = "Task" [ 1888.059637] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.070331] env[62684]: DEBUG oslo_vmware.api [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Task: {'id': task-2052777, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.167156] env[62684]: DEBUG oslo_concurrency.lockutils [req-8e8ad003-5d75-4090-baa3-8642b724dd80 req-d3716c13-8ba7-4ba0-87d3-651c29935dcc service nova] Releasing lock "refresh_cache-0dbd52ac-c987-4728-974e-73e99465c5e7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1888.202777] env[62684]: DEBUG nova.compute.manager [req-b475d381-936e-43cb-9781-5cd8515acf72 req-fa270c13-847c-48db-9683-27ac4eb829ea service nova] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Received event network-vif-deleted-8da4cae5-6ee0-4160-8056-921fb0de7b4f {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1888.203031] env[62684]: INFO nova.compute.manager [req-b475d381-936e-43cb-9781-5cd8515acf72 req-fa270c13-847c-48db-9683-27ac4eb829ea service nova] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Neutron deleted interface 8da4cae5-6ee0-4160-8056-921fb0de7b4f; detaching it from the instance and deleting it from the info cache [ 1888.205105] env[62684]: DEBUG nova.network.neutron [req-b475d381-936e-43cb-9781-5cd8515acf72 req-fa270c13-847c-48db-9683-27ac4eb829ea service nova] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1888.324038] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052773, 'name': CreateVM_Task, 'duration_secs': 0.60779} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.326165] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1888.327366] env[62684]: DEBUG oslo_concurrency.lockutils [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1888.327988] env[62684]: DEBUG oslo_concurrency.lockutils [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1888.329112] env[62684]: DEBUG oslo_concurrency.lockutils [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1888.329112] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c403d9d-5858-4ed7-a1da-118245373166 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.334123] env[62684]: DEBUG oslo_vmware.api [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1888.334123] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52284808-41eb-5801-7a34-54f686196e58" [ 1888.334123] env[62684]: _type = "Task" [ 1888.334123] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.344570] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1888.352217] env[62684]: DEBUG oslo_vmware.api [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52284808-41eb-5801-7a34-54f686196e58, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.354658] env[62684]: DEBUG oslo_concurrency.lockutils [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1888.355154] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1888.355544] env[62684]: DEBUG oslo_concurrency.lockutils [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1888.355847] env[62684]: DEBUG oslo_concurrency.lockutils [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1888.356216] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1888.360021] env[62684]: DEBUG oslo_vmware.api [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052774, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.662722} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.360021] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bb619396-f6ed-4195-82cd-66768e7acebe {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.360021] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1/ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1888.360021] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1888.360021] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-336145f8-a2d3-44f0-83fe-a0aca0f65c48 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.370531] env[62684]: DEBUG oslo_vmware.api [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 1888.370531] env[62684]: value = "task-2052778" [ 1888.370531] env[62684]: _type = "Task" [ 1888.370531] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.372358] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1888.372729] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1888.380225] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8796948f-42d2-4ca3-b39e-e5eb0297fa7b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.391059] env[62684]: DEBUG oslo_vmware.api [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052778, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.392751] env[62684]: DEBUG oslo_vmware.api [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1888.392751] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f268d2-e492-d89e-bb44-b2318fe336e3" [ 1888.392751] env[62684]: _type = "Task" [ 1888.392751] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.402716] env[62684]: DEBUG oslo_vmware.api [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f268d2-e492-d89e-bb44-b2318fe336e3, 'name': SearchDatastore_Task, 'duration_secs': 0.010889} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.403889] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d196aa89-0444-4cca-b6cf-0d3c802f0439 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.412024] env[62684]: DEBUG oslo_vmware.api [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1888.412024] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52adf38a-0eb3-af6d-2545-17e902f59d03" [ 1888.412024] env[62684]: _type = "Task" [ 1888.412024] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.424803] env[62684]: DEBUG oslo_vmware.api [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52adf38a-0eb3-af6d-2545-17e902f59d03, 'name': SearchDatastore_Task, 'duration_secs': 0.010126} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.424803] env[62684]: DEBUG oslo_concurrency.lockutils [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1888.425075] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] b1f70e39-bf37-4fb8-b95b-653b59bec265/b1f70e39-bf37-4fb8-b95b-653b59bec265.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1888.426175] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d975ae33-f29e-4656-94f7-fbde59fc066c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.432610] env[62684]: DEBUG oslo_vmware.api [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1888.432610] env[62684]: value = "task-2052779" [ 1888.432610] env[62684]: _type = "Task" [ 1888.432610] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.443282] env[62684]: DEBUG oslo_vmware.api [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052779, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.457105] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ba9f88f5-ac0a-47cd-b4d4-315ab7feea99 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1888.458759] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 32.557s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1888.458956] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1888.459156] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1888.459512] env[62684]: DEBUG oslo_concurrency.lockutils [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.772s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1888.461267] env[62684]: INFO nova.compute.claims [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1888.468667] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ec20e2-c8e4-42cb-ae6a-9bb74cca3ec2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.475530] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-318528cd-b72e-49e9-9a6b-bc5e177a6745 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.496219] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd74b6b-8494-4027-bbec-2c78a065fd09 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.510275] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bdadc55-b0a0-47fd-8e53-2b4a331c2724 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.543935] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178523MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1888.544171] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1888.544916] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "d06f3099-d05f-417f-a71a-7b368590624f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.558s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1888.555553] env[62684]: DEBUG nova.network.neutron [-] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1888.574576] env[62684]: DEBUG oslo_vmware.api [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Task: {'id': task-2052777, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.383432} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.575558] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1888.575821] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1888.576060] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1888.576403] env[62684]: INFO nova.compute.manager [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1888.576486] env[62684]: DEBUG oslo.service.loopingcall [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1888.576966] env[62684]: DEBUG nova.compute.manager [-] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1888.577195] env[62684]: DEBUG nova.network.neutron [-] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1888.706998] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-40e8a973-3f38-4888-8298-73afcba1b7b7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.718184] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd971d05-431d-4231-82de-55045f5cf394 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.751821] env[62684]: DEBUG nova.compute.manager [req-b475d381-936e-43cb-9781-5cd8515acf72 req-fa270c13-847c-48db-9683-27ac4eb829ea service nova] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Detach interface failed, port_id=8da4cae5-6ee0-4160-8056-921fb0de7b4f, reason: Instance b945f05d-ef1c-4469-9390-f7bbd4f435f0 could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1888.885546] env[62684]: DEBUG oslo_vmware.api [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052778, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084151} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.886041] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1888.887367] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c99eaa26-4014-4d22-a890-7e9d1aa73ec3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.925821] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1/ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1888.930013] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d4a3931c-de63-4bff-ac56-95309246ddfc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.972146] env[62684]: DEBUG oslo_vmware.api [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 1888.972146] env[62684]: value = "task-2052780" [ 1888.972146] env[62684]: _type = "Task" [ 1888.972146] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.974827] env[62684]: DEBUG oslo_vmware.api [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052779, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.986037] env[62684]: DEBUG oslo_vmware.api [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052780, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.058447] env[62684]: INFO nova.compute.manager [-] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Took 1.51 seconds to deallocate network for instance. [ 1889.094238] env[62684]: DEBUG nova.network.neutron [req-312d7aad-58b8-4f86-8f1b-9805f177e4ad req-abdc5742-0b6e-47c3-a913-d1d9f5e24b53 service nova] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Updated VIF entry in instance network info cache for port 35a0f9ef-b68c-43df-8887-6c35257bbc58. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1889.094238] env[62684]: DEBUG nova.network.neutron [req-312d7aad-58b8-4f86-8f1b-9805f177e4ad req-abdc5742-0b6e-47c3-a913-d1d9f5e24b53 service nova] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Updating instance_info_cache with network_info: [{"id": "35a0f9ef-b68c-43df-8887-6c35257bbc58", "address": "fa:16:3e:7a:c4:6c", "network": {"id": "bd253713-4e81-4c94-9689-22b81e7f51b6", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-307001665-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd812751722143fabedfa986a2d98b59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35a0f9ef-b6", "ovs_interfaceid": "35a0f9ef-b68c-43df-8887-6c35257bbc58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1889.236762] env[62684]: DEBUG oslo_concurrency.lockutils [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Acquiring lock "50bc9674-d19c-40f1-a89f-1738a1e48307" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1889.237147] env[62684]: DEBUG oslo_concurrency.lockutils [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Lock "50bc9674-d19c-40f1-a89f-1738a1e48307" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1889.237440] env[62684]: DEBUG oslo_concurrency.lockutils [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Acquiring lock "50bc9674-d19c-40f1-a89f-1738a1e48307-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1889.237568] env[62684]: DEBUG oslo_concurrency.lockutils [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Lock "50bc9674-d19c-40f1-a89f-1738a1e48307-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1889.237749] env[62684]: DEBUG 
oslo_concurrency.lockutils [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Lock "50bc9674-d19c-40f1-a89f-1738a1e48307-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1889.240135] env[62684]: INFO nova.compute.manager [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Terminating instance [ 1889.244428] env[62684]: DEBUG nova.compute.manager [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1889.244636] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1889.245500] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ccc4132-70af-451c-ad38-eabc57b0cfb5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.253433] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1889.253812] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-27325570-5371-4841-92a0-a38bb284eab4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.260307] env[62684]: DEBUG oslo_vmware.api [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Waiting for the task: (returnval){ [ 1889.260307] env[62684]: value = "task-2052781" [ 1889.260307] env[62684]: _type = "Task" [ 1889.260307] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.273699] env[62684]: DEBUG oslo_vmware.api [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Task: {'id': task-2052781, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.492565] env[62684]: DEBUG oslo_vmware.api [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052779, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.699012} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1889.494695] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] b1f70e39-bf37-4fb8-b95b-653b59bec265/b1f70e39-bf37-4fb8-b95b-653b59bec265.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1889.494955] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1889.495535] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-84253e58-6d5c-41ce-b743-48ab8926e280 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.502230] env[62684]: DEBUG oslo_vmware.api [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052780, 'name': ReconfigVM_Task, 'duration_secs': 0.472267} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1889.503027] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Reconfigured VM instance instance-0000002c to attach disk [datastore1] ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1/ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1889.504187] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-678ec6cb-f82d-4552-a7a2-ee3a4c87694e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.509460] env[62684]: DEBUG oslo_vmware.api [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1889.509460] env[62684]: value = "task-2052782" [ 1889.509460] env[62684]: _type = "Task" [ 1889.509460] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.514786] env[62684]: DEBUG oslo_vmware.api [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 1889.514786] env[62684]: value = "task-2052783" [ 1889.514786] env[62684]: _type = "Task" [ 1889.514786] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.524640] env[62684]: DEBUG oslo_vmware.api [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052782, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.532170] env[62684]: DEBUG oslo_vmware.api [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052783, 'name': Rename_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.565785] env[62684]: DEBUG oslo_concurrency.lockutils [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1889.598362] env[62684]: DEBUG oslo_concurrency.lockutils [req-312d7aad-58b8-4f86-8f1b-9805f177e4ad req-abdc5742-0b6e-47c3-a913-d1d9f5e24b53 service nova] Releasing lock "refresh_cache-b1f70e39-bf37-4fb8-b95b-653b59bec265" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1889.766012] env[62684]: DEBUG nova.compute.manager [req-3431869b-df3f-448f-8395-b958d1ffb912 req-00dd52b7-ded7-48c3-b679-8aaac62f8740 service nova] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Received event network-vif-deleted-de8de653-ec88-4a72-840c-27978f584581 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1889.766354] env[62684]: INFO nova.compute.manager [req-3431869b-df3f-448f-8395-b958d1ffb912 req-00dd52b7-ded7-48c3-b679-8aaac62f8740 service nova] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Neutron deleted interface de8de653-ec88-4a72-840c-27978f584581; detaching it from the instance and deleting it from the info cache [ 1889.766516] env[62684]: DEBUG nova.network.neutron [req-3431869b-df3f-448f-8395-b958d1ffb912 req-00dd52b7-ded7-48c3-b679-8aaac62f8740 service nova] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1889.780400] env[62684]: DEBUG oslo_vmware.api [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Task: {'id': task-2052781, 'name': PowerOffVM_Task, 'duration_secs': 0.211298} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1889.780976] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1889.780976] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1889.781197] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-999fdfa4-eec9-4e31-a1a8-c434ed80c9d4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.821315] env[62684]: DEBUG nova.network.neutron [-] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1889.881607] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1889.881607] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1889.881607] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Deleting the datastore file [datastore1] 50bc9674-d19c-40f1-a89f-1738a1e48307 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1889.881607] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d8049077-8b18-4d2c-9e04-19c5e7f19503 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.888718] env[62684]: DEBUG oslo_vmware.api [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Waiting for the task: (returnval){ [ 1889.888718] env[62684]: value = "task-2052785" [ 1889.888718] env[62684]: _type = "Task" [ 1889.888718] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.899515] env[62684]: DEBUG oslo_vmware.api [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Task: {'id': task-2052785, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.021492] env[62684]: DEBUG oslo_vmware.api [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052782, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.032999] env[62684]: DEBUG oslo_vmware.api [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052783, 'name': Rename_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.068126] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e4d176-4abd-432d-a58c-e6b352da8167 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.076512] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0fe8f3e-d0c3-47e6-82a5-81fb1893962e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.110579] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-902b17bb-9ad7-4f16-919f-734169ad71dd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.118343] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ec4239-1ca4-4c76-a02d-cface971ba2f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.133280] env[62684]: DEBUG nova.compute.provider_tree [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1890.272951] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-609108fd-83ae-48b5-8c29-cc852034562b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.286016] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff69a28-cd2f-4d56-99e5-0e7a75b5e9b6 {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.318827] env[62684]: DEBUG nova.compute.manager [req-3431869b-df3f-448f-8395-b958d1ffb912 req-00dd52b7-ded7-48c3-b679-8aaac62f8740 service nova] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Detach interface failed, port_id=de8de653-ec88-4a72-840c-27978f584581, reason: Instance 0dbd52ac-c987-4728-974e-73e99465c5e7 could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1890.323590] env[62684]: INFO nova.compute.manager [-] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Took 1.75 seconds to deallocate network for instance. [ 1890.401554] env[62684]: DEBUG oslo_vmware.api [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Task: {'id': task-2052785, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.188847} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.401554] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1890.401554] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1890.401554] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1890.401554] env[62684]: INFO nova.compute.manager [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1890.401846] env[62684]: DEBUG oslo.service.loopingcall [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1890.401846] env[62684]: DEBUG nova.compute.manager [-] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1890.401846] env[62684]: DEBUG nova.network.neutron [-] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1890.521182] env[62684]: DEBUG oslo_vmware.api [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052782, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.958517} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.521552] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1890.525062] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11bdf8bb-0da7-4af1-9556-0bf1c9f798d6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.534387] env[62684]: DEBUG oslo_vmware.api [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052783, 'name': Rename_Task, 'duration_secs': 1.00286} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.545644] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1890.554725] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] b1f70e39-bf37-4fb8-b95b-653b59bec265/b1f70e39-bf37-4fb8-b95b-653b59bec265.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1890.555270] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d4b059fe-9e30-4597-9fa8-291975aaaa55 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.557195] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd5fb0a6-83c4-4680-8b08-cfcfd60c09a2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.578368] env[62684]: DEBUG oslo_vmware.api [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1890.578368] env[62684]: value = "task-2052787" [ 1890.578368] env[62684]: _type = "Task" [ 1890.578368] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.578368] env[62684]: DEBUG oslo_vmware.api [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 1890.578368] env[62684]: value = "task-2052786" [ 1890.578368] env[62684]: _type = "Task" [ 1890.578368] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.589788] env[62684]: DEBUG oslo_vmware.api [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052787, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.593243] env[62684]: DEBUG oslo_vmware.api [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052786, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.654791] env[62684]: ERROR nova.scheduler.client.report [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [req-32cc9442-14af-4f2b-af0d-5d83632bef33] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-32cc9442-14af-4f2b-af0d-5d83632bef33"}]} [ 1890.680615] env[62684]: DEBUG nova.scheduler.client.report [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1890.700021] env[62684]: DEBUG nova.scheduler.client.report [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1890.700021] env[62684]: DEBUG nova.compute.provider_tree [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1890.713279] env[62684]: DEBUG nova.scheduler.client.report [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1890.734348] env[62684]: DEBUG nova.scheduler.client.report [None 
req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1890.831634] env[62684]: DEBUG oslo_concurrency.lockutils [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1891.094457] env[62684]: DEBUG oslo_vmware.api [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052786, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.097841] env[62684]: DEBUG oslo_vmware.api [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052787, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.193801] env[62684]: DEBUG nova.network.neutron [-] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1891.260267] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9254a05e-b74c-47e1-8a64-a6b83c173e39 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.269835] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a579a3-4abb-4029-ac7f-a6b36fffeb1e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.315912] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f78aa008-e0d6-42c7-a49a-c08f9c89a43d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.325048] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c8d6e97-b189-4fd0-b108-b25f76818529 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.339049] env[62684]: DEBUG nova.compute.provider_tree [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1891.589488] env[62684]: DEBUG oslo_vmware.api [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052787, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.597869] env[62684]: DEBUG oslo_vmware.api [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052786, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.695570] env[62684]: INFO nova.compute.manager [-] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Took 1.29 seconds to deallocate network for instance. [ 1891.883897] env[62684]: DEBUG nova.scheduler.client.report [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 70 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1891.883897] env[62684]: DEBUG nova.compute.provider_tree [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 70 to 71 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1891.883897] env[62684]: DEBUG nova.compute.provider_tree [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1892.035359] env[62684]: DEBUG nova.compute.manager [req-6fb1d4a4-69cb-4840-b6d7-f26b4f467150 req-2047abbd-4bd5-42dd-8f40-7404c33fbfd8 service nova] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Received event network-vif-deleted-c9bcfcec-29a8-4bb0-91f9-14d8c744d944 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1892.088939] env[62684]: DEBUG oslo_vmware.api [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 
tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052787, 'name': ReconfigVM_Task, 'duration_secs': 1.245598} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1892.089260] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Reconfigured VM instance instance-0000002d to attach disk [datastore1] b1f70e39-bf37-4fb8-b95b-653b59bec265/b1f70e39-bf37-4fb8-b95b-653b59bec265.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1892.089927] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a57f339a-006e-4525-94a7-e0edc6c8717c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.099771] env[62684]: DEBUG oslo_vmware.api [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052786, 'name': PowerOnVM_Task, 'duration_secs': 1.391175} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1892.101383] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1892.101617] env[62684]: INFO nova.compute.manager [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Took 9.88 seconds to spawn the instance on the hypervisor. [ 1892.101833] env[62684]: DEBUG nova.compute.manager [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1892.102190] env[62684]: DEBUG oslo_vmware.api [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1892.102190] env[62684]: value = "task-2052788" [ 1892.102190] env[62684]: _type = "Task" [ 1892.102190] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.102869] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddce8129-1753-48f1-8609-b23006f62ad4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.112748] env[62684]: DEBUG oslo_vmware.api [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052788, 'name': Rename_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.206335] env[62684]: DEBUG oslo_concurrency.lockutils [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.394163] env[62684]: DEBUG oslo_concurrency.lockutils [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.934s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.394724] env[62684]: DEBUG nova.compute.manager [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1892.397953] env[62684]: DEBUG oslo_concurrency.lockutils [None req-091a64e3-3926-4ff2-8ba5-0483d14d969c tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.718s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.398213] env[62684]: DEBUG nova.objects.instance [None req-091a64e3-3926-4ff2-8ba5-0483d14d969c tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lazy-loading 'resources' on Instance uuid e08f8636-5193-40fa-972c-f0ecab193fc1 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1892.472369] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Acquiring lock "31419285-9fdf-4d37-94d7-d1b08c6b6b05" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.472626] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Lock "31419285-9fdf-4d37-94d7-d1b08c6b6b05" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.614712] env[62684]: DEBUG oslo_vmware.api [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052788, 'name': Rename_Task, 'duration_secs': 0.165865} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1892.614960] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1892.615338] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-894a245b-2ff3-47b1-b7e7-32b6c92600ba {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.625395] env[62684]: DEBUG oslo_vmware.api [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1892.625395] env[62684]: value = "task-2052789" [ 1892.625395] env[62684]: _type = "Task" [ 1892.625395] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.629722] env[62684]: INFO nova.compute.manager [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Took 47.82 seconds to build instance. [ 1892.636404] env[62684]: DEBUG oslo_vmware.api [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052789, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.905466] env[62684]: DEBUG nova.compute.utils [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1892.907463] env[62684]: DEBUG nova.compute.manager [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1892.907463] env[62684]: DEBUG nova.network.neutron [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1892.958257] env[62684]: DEBUG nova.policy [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1d537b50efba47deadcc9c04060c9ca1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '17c9f5881b06427e969a783fe44135d1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1893.131704] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd824167-a92b-40e9-88a5-043470bc59a2 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.784s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1893.141649] env[62684]: DEBUG oslo_vmware.api [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052789, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.321343] env[62684]: DEBUG nova.network.neutron [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Successfully created port: d011162f-0ef0-4133-ac1b-f7ed8a3a8a3c {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1893.411400] env[62684]: DEBUG nova.compute.manager [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1893.473756] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07c0c82f-62ba-427f-a962-8696ca62a7bc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.482464] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-914bbb1a-348c-4059-b2ba-ae42ffd781d6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.523202] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b6b934-87cb-4fe1-863e-c056a5ae0f5d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.530203] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52e6e062-a105-481b-9a3d-bb1ce00b5e59 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.545104] env[62684]: DEBUG nova.compute.provider_tree [None req-091a64e3-3926-4ff2-8ba5-0483d14d969c tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1893.638030] env[62684]: DEBUG oslo_vmware.api [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052789, 'name': PowerOnVM_Task, 'duration_secs': 0.727891} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.638432] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1893.638650] env[62684]: INFO nova.compute.manager [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Took 8.61 seconds to spawn the instance on the hypervisor. [ 1893.638844] env[62684]: DEBUG nova.compute.manager [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1893.639647] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41194c16-7e33-4295-a662-98f3ad4b7587 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.643185] env[62684]: DEBUG nova.compute.manager [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1893.790963] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Acquiring lock "18a97088-fffa-4b77-8ab0-d24f6f84f516" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.791417] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Lock "18a97088-fffa-4b77-8ab0-d24f6f84f516" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1894.048082] env[62684]: DEBUG nova.scheduler.client.report [None req-091a64e3-3926-4ff2-8ba5-0483d14d969c tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1894.162495] env[62684]: INFO nova.compute.manager [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Took 49.19 seconds to build instance. 
[ 1894.173767] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.415560] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "a56a3fab-e491-44f5-9cf4-2c308138ffc4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.416515] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "a56a3fab-e491-44f5-9cf4-2c308138ffc4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1894.420963] env[62684]: DEBUG nova.compute.manager [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1894.447813] env[62684]: DEBUG nova.virt.hardware [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1894.448212] env[62684]: DEBUG nova.virt.hardware [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1894.448384] env[62684]: DEBUG nova.virt.hardware [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1894.448580] env[62684]: DEBUG nova.virt.hardware [None 
req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1894.448734] env[62684]: DEBUG nova.virt.hardware [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1894.449085] env[62684]: DEBUG nova.virt.hardware [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1894.449171] env[62684]: DEBUG nova.virt.hardware [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1894.449283] env[62684]: DEBUG nova.virt.hardware [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1894.449596] env[62684]: DEBUG nova.virt.hardware [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1894.449879] env[62684]: DEBUG nova.virt.hardware [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1894.450073] env[62684]: DEBUG nova.virt.hardware [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1894.451307] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57425980-069c-4554-a84e-670def5245cf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.459673] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d45a1840-a536-43be-8b0a-314d389d7f6d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.553625] env[62684]: DEBUG oslo_concurrency.lockutils [None req-091a64e3-3926-4ff2-8ba5-0483d14d969c tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "compute_resources" 
"released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.155s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.559023] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.442s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1894.559023] env[62684]: INFO nova.compute.claims [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1894.577946] env[62684]: INFO nova.scheduler.client.report [None req-091a64e3-3926-4ff2-8ba5-0483d14d969c tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Deleted allocations for instance e08f8636-5193-40fa-972c-f0ecab193fc1 [ 1894.664354] env[62684]: DEBUG oslo_concurrency.lockutils [None req-338e5e51-745f-48bd-8bd2-e76ee07081b8 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "b1f70e39-bf37-4fb8-b95b-653b59bec265" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.132s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.766220] env[62684]: DEBUG nova.compute.manager [req-7b7494fc-ad43-43bc-8817-8d518cef0099 req-c215d62e-a36b-458e-8818-a71919abb98d service nova] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Received event network-vif-plugged-d011162f-0ef0-4133-ac1b-f7ed8a3a8a3c {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1894.766505] env[62684]: DEBUG oslo_concurrency.lockutils [req-7b7494fc-ad43-43bc-8817-8d518cef0099 req-c215d62e-a36b-458e-8818-a71919abb98d service nova] Acquiring lock "26303c0e-be87-41ff-a15c-e92f91f8a05f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.766863] env[62684]: DEBUG oslo_concurrency.lockutils [req-7b7494fc-ad43-43bc-8817-8d518cef0099 req-c215d62e-a36b-458e-8818-a71919abb98d service nova] Lock "26303c0e-be87-41ff-a15c-e92f91f8a05f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1894.766968] env[62684]: DEBUG oslo_concurrency.lockutils [req-7b7494fc-ad43-43bc-8817-8d518cef0099 req-c215d62e-a36b-458e-8818-a71919abb98d service nova] Lock "26303c0e-be87-41ff-a15c-e92f91f8a05f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.767169] env[62684]: DEBUG nova.compute.manager [req-7b7494fc-ad43-43bc-8817-8d518cef0099 req-c215d62e-a36b-458e-8818-a71919abb98d service nova] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] No waiting events found dispatching 
network-vif-plugged-d011162f-0ef0-4133-ac1b-f7ed8a3a8a3c {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1894.767353] env[62684]: WARNING nova.compute.manager [req-7b7494fc-ad43-43bc-8817-8d518cef0099 req-c215d62e-a36b-458e-8818-a71919abb98d service nova] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Received unexpected event network-vif-plugged-d011162f-0ef0-4133-ac1b-f7ed8a3a8a3c for instance with vm_state building and task_state spawning. [ 1894.880297] env[62684]: DEBUG nova.network.neutron [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Successfully updated port: d011162f-0ef0-4133-ac1b-f7ed8a3a8a3c {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1895.087843] env[62684]: DEBUG oslo_concurrency.lockutils [None req-091a64e3-3926-4ff2-8ba5-0483d14d969c tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "e08f8636-5193-40fa-972c-f0ecab193fc1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.975s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.167029] env[62684]: DEBUG nova.compute.manager [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1895.387990] env[62684]: DEBUG oslo_concurrency.lockutils [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Acquiring lock "refresh_cache-26303c0e-be87-41ff-a15c-e92f91f8a05f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1895.389061] env[62684]: DEBUG oslo_concurrency.lockutils [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Acquired lock "refresh_cache-26303c0e-be87-41ff-a15c-e92f91f8a05f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1895.389061] env[62684]: DEBUG nova.network.neutron [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1895.683706] env[62684]: DEBUG oslo_concurrency.lockutils [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.924227] env[62684]: DEBUG nova.network.neutron [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] 
Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1896.004273] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-331ad61b-1630-4af0-b0fb-1d2f15fc71a6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.015508] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55e14a90-7292-4fa9-850f-21b6e2a2c6a1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.048559] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cce9476-bb77-4b7b-887b-3e2e955fb83f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.056566] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a42e211-77b2-46c5-b59d-d7c09812f02d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.075081] env[62684]: DEBUG nova.compute.provider_tree [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1896.119141] env[62684]: DEBUG nova.network.neutron [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Updating instance_info_cache with network_info: [{"id": "d011162f-0ef0-4133-ac1b-f7ed8a3a8a3c", "address": "fa:16:3e:3c:35:95", "network": {"id": "5bdb9a67-50c7-45c7-b9c4-bef767f22efc", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-2129055498-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17c9f5881b06427e969a783fe44135d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd011162f-0e", "ovs_interfaceid": "d011162f-0ef0-4133-ac1b-f7ed8a3a8a3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1896.581399] env[62684]: DEBUG nova.scheduler.client.report [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1896.620128] env[62684]: DEBUG oslo_concurrency.lockutils [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Releasing lock "refresh_cache-26303c0e-be87-41ff-a15c-e92f91f8a05f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1896.620699] env[62684]: DEBUG nova.compute.manager [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Instance network_info: |[{"id": "d011162f-0ef0-4133-ac1b-f7ed8a3a8a3c", "address": "fa:16:3e:3c:35:95", "network": {"id": "5bdb9a67-50c7-45c7-b9c4-bef767f22efc", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-2129055498-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17c9f5881b06427e969a783fe44135d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd011162f-0e", "ovs_interfaceid": "d011162f-0ef0-4133-ac1b-f7ed8a3a8a3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1896.621180] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:35:95', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ec763be6-4041-4651-8fd7-3820cf0ab86d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd011162f-0ef0-4133-ac1b-f7ed8a3a8a3c', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1896.628906] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Creating folder: Project (17c9f5881b06427e969a783fe44135d1). Parent ref: group-v421118. 
{{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1896.629219] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f7728d6-2fe5-4d49-ab13-dafe4bc5d0f7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.640384] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Created folder: Project (17c9f5881b06427e969a783fe44135d1) in parent group-v421118. [ 1896.640599] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Creating folder: Instances. Parent ref: group-v421256. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1896.640851] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-77072dc1-2d58-477e-9145-d1aa0100070d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.650637] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Created folder: Instances in parent group-v421256. [ 1896.650637] env[62684]: DEBUG oslo.service.loopingcall [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1896.650815] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1896.650985] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c2323d4-6f92-484d-bea0-8b9522c7f8a9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.669815] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1896.669815] env[62684]: value = "task-2052792" [ 1896.669815] env[62684]: _type = "Task" [ 1896.669815] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.677422] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052792, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.800266] env[62684]: DEBUG nova.compute.manager [req-347b8aa9-a7af-4d4c-b049-645259c95de9 req-4582d373-c2b3-45e4-867d-8ea5ef7023f0 service nova] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Received event network-changed-d011162f-0ef0-4133-ac1b-f7ed8a3a8a3c {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1896.800266] env[62684]: DEBUG nova.compute.manager [req-347b8aa9-a7af-4d4c-b049-645259c95de9 req-4582d373-c2b3-45e4-867d-8ea5ef7023f0 service nova] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Refreshing instance network info cache due to event network-changed-d011162f-0ef0-4133-ac1b-f7ed8a3a8a3c. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1896.800502] env[62684]: DEBUG oslo_concurrency.lockutils [req-347b8aa9-a7af-4d4c-b049-645259c95de9 req-4582d373-c2b3-45e4-867d-8ea5ef7023f0 service nova] Acquiring lock "refresh_cache-26303c0e-be87-41ff-a15c-e92f91f8a05f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1896.800652] env[62684]: DEBUG oslo_concurrency.lockutils [req-347b8aa9-a7af-4d4c-b049-645259c95de9 req-4582d373-c2b3-45e4-867d-8ea5ef7023f0 service nova] Acquired lock "refresh_cache-26303c0e-be87-41ff-a15c-e92f91f8a05f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1896.800820] env[62684]: DEBUG nova.network.neutron [req-347b8aa9-a7af-4d4c-b049-645259c95de9 req-4582d373-c2b3-45e4-867d-8ea5ef7023f0 service nova] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Refreshing network info cache for port d011162f-0ef0-4133-ac1b-f7ed8a3a8a3c {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1897.085920] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.530s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1897.086505] env[62684]: DEBUG nova.compute.manager [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1897.092027] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.950s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1897.094689] env[62684]: INFO nova.compute.claims [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1897.180093] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052792, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.517224] env[62684]: DEBUG nova.network.neutron [req-347b8aa9-a7af-4d4c-b049-645259c95de9 req-4582d373-c2b3-45e4-867d-8ea5ef7023f0 service nova] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Updated VIF entry in instance network info cache for port d011162f-0ef0-4133-ac1b-f7ed8a3a8a3c. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1897.517608] env[62684]: DEBUG nova.network.neutron [req-347b8aa9-a7af-4d4c-b049-645259c95de9 req-4582d373-c2b3-45e4-867d-8ea5ef7023f0 service nova] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Updating instance_info_cache with network_info: [{"id": "d011162f-0ef0-4133-ac1b-f7ed8a3a8a3c", "address": "fa:16:3e:3c:35:95", "network": {"id": "5bdb9a67-50c7-45c7-b9c4-bef767f22efc", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-2129055498-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17c9f5881b06427e969a783fe44135d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd011162f-0e", "ovs_interfaceid": "d011162f-0ef0-4133-ac1b-f7ed8a3a8a3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1897.601652] env[62684]: DEBUG nova.compute.utils [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1897.605123] env[62684]: DEBUG nova.compute.manager [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1897.605946] env[62684]: DEBUG nova.network.neutron [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1897.647581] env[62684]: DEBUG nova.policy [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '15ae7a383f294208909e3763b5429340', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bd812751722143fabedfa986a2d98b59', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1897.681529] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052792, 'name': CreateVM_Task, 'duration_secs': 0.629574} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.681529] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1897.681714] env[62684]: DEBUG oslo_concurrency.lockutils [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1897.681849] env[62684]: DEBUG oslo_concurrency.lockutils [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1897.682174] env[62684]: DEBUG oslo_concurrency.lockutils [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1897.682428] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6db2a883-9cf5-4ee3-85bf-ffabd4204b4a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.687190] env[62684]: DEBUG oslo_vmware.api [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Waiting for the task: (returnval){ [ 1897.687190] env[62684]: value = 
"session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]528e8298-b51d-77a1-07c9-1d6a7f57b78c" [ 1897.687190] env[62684]: _type = "Task" [ 1897.687190] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.695477] env[62684]: DEBUG oslo_vmware.api [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]528e8298-b51d-77a1-07c9-1d6a7f57b78c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.912920] env[62684]: DEBUG nova.network.neutron [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Successfully created port: 1ebe50b2-d6ab-48aa-b581-d2d09b588552 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1898.020993] env[62684]: DEBUG oslo_concurrency.lockutils [req-347b8aa9-a7af-4d4c-b049-645259c95de9 req-4582d373-c2b3-45e4-867d-8ea5ef7023f0 service nova] Releasing lock "refresh_cache-26303c0e-be87-41ff-a15c-e92f91f8a05f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1898.106180] env[62684]: DEBUG nova.compute.manager [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1898.168419] env[62684]: DEBUG oslo_concurrency.lockutils [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1898.168699] env[62684]: DEBUG oslo_concurrency.lockutils [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1898.168889] env[62684]: INFO nova.compute.manager [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Shelving [ 1898.198593] env[62684]: DEBUG oslo_vmware.api [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]528e8298-b51d-77a1-07c9-1d6a7f57b78c, 'name': SearchDatastore_Task, 'duration_secs': 0.01028} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.198986] env[62684]: DEBUG oslo_concurrency.lockutils [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1898.199173] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1898.199414] env[62684]: DEBUG oslo_concurrency.lockutils [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1898.199602] env[62684]: DEBUG oslo_concurrency.lockutils [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1898.199791] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1898.200191] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dcb045b7-8ecb-4edd-acff-9e5ded97817c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.210176] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1898.210176] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1898.210806] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4631a07a-6741-4460-b5b6-971010ac3099 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.216395] env[62684]: DEBUG oslo_vmware.api [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Waiting for the task: (returnval){ [ 1898.216395] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529d6982-ab60-3957-2833-ea78ee128fb7" [ 1898.216395] env[62684]: _type = "Task" [ 1898.216395] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.228923] env[62684]: DEBUG oslo_vmware.api [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529d6982-ab60-3957-2833-ea78ee128fb7, 'name': SearchDatastore_Task, 'duration_secs': 0.009022} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.229698] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-971c0f6c-8293-4f62-8e87-643e8c9396ca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.236292] env[62684]: DEBUG oslo_vmware.api [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Waiting for the task: (returnval){ [ 1898.236292] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521a158e-988d-938d-b6f9-bce3239eec4f" [ 1898.236292] env[62684]: _type = "Task" [ 1898.236292] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.244100] env[62684]: DEBUG oslo_vmware.api [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521a158e-988d-938d-b6f9-bce3239eec4f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.580683] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb686de6-0769-4f29-a0d9-9e24921adc46 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.589389] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f378f9d-97f2-428b-b185-be9d66bb5148 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.623529] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b33a5903-293c-41b3-a193-7eda805425b3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.631375] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fedac73-4da3-4704-b06e-dd3407acbf5a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.645135] env[62684]: DEBUG nova.compute.provider_tree [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1898.675012] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1898.675289] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ec4806cc-806b-4af9-a02d-cff6350fc8c0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.683304] env[62684]: DEBUG oslo_vmware.api [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 1898.683304] env[62684]: value = "task-2052793" [ 1898.683304] env[62684]: _type = "Task" [ 1898.683304] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.691563] env[62684]: DEBUG oslo_vmware.api [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2052793, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.749698] env[62684]: DEBUG oslo_vmware.api [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521a158e-988d-938d-b6f9-bce3239eec4f, 'name': SearchDatastore_Task, 'duration_secs': 0.008718} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.749984] env[62684]: DEBUG oslo_concurrency.lockutils [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1898.750271] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 26303c0e-be87-41ff-a15c-e92f91f8a05f/26303c0e-be87-41ff-a15c-e92f91f8a05f.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1898.750542] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-453cab9a-8846-48fd-b5d0-9fe1c6e8b764 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.757615] env[62684]: DEBUG oslo_vmware.api [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Waiting for the task: (returnval){ [ 1898.757615] env[62684]: value = "task-2052794" [ 1898.757615] env[62684]: _type = "Task" [ 1898.757615] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.765624] env[62684]: DEBUG oslo_vmware.api [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Task: {'id': task-2052794, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.128911] env[62684]: DEBUG nova.compute.manager [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1899.148147] env[62684]: DEBUG nova.scheduler.client.report [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1899.162548] env[62684]: DEBUG nova.virt.hardware [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1899.162860] env[62684]: DEBUG nova.virt.hardware [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1899.163451] env[62684]: DEBUG nova.virt.hardware [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1899.163451] env[62684]: DEBUG nova.virt.hardware [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1899.163451] env[62684]: DEBUG nova.virt.hardware [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1899.163604] env[62684]: DEBUG nova.virt.hardware [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1899.163974] env[62684]: DEBUG nova.virt.hardware [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1899.164221] env[62684]: DEBUG nova.virt.hardware [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1899.164421] env[62684]: DEBUG nova.virt.hardware [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1899.164421] env[62684]: DEBUG nova.virt.hardware [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1899.164837] env[62684]: DEBUG nova.virt.hardware [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1899.165700] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-652462a2-603d-4330-a6d0-130ec98b4919 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.175971] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-297d6110-f011-48de-beff-5f002ebf5b90 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.199865] env[62684]: DEBUG oslo_vmware.api [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2052793, 'name': PowerOffVM_Task, 'duration_secs': 0.181474} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.200088] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1899.200945] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a373eb96-0c02-4d2a-82f2-ece5880468ab {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.220116] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c10e810f-9be2-4d8a-a045-f5c8bc3d1b79 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.268526] env[62684]: DEBUG oslo_vmware.api [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Task: {'id': task-2052794, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.329620] env[62684]: DEBUG nova.compute.manager [req-955ef3e3-4bdc-4882-b9eb-6c68a2cf9e54 req-1511f20c-5041-4780-9fe7-b68c0701d877 service nova] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Received event network-vif-plugged-1ebe50b2-d6ab-48aa-b581-d2d09b588552 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1899.329929] env[62684]: DEBUG oslo_concurrency.lockutils [req-955ef3e3-4bdc-4882-b9eb-6c68a2cf9e54 req-1511f20c-5041-4780-9fe7-b68c0701d877 service nova] Acquiring lock "ca3d1a73-6f3b-4278-8fe7-03b66f407ba6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1899.330260] env[62684]: DEBUG oslo_concurrency.lockutils [req-955ef3e3-4bdc-4882-b9eb-6c68a2cf9e54 req-1511f20c-5041-4780-9fe7-b68c0701d877 service nova] Lock "ca3d1a73-6f3b-4278-8fe7-03b66f407ba6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.330460] env[62684]: DEBUG oslo_concurrency.lockutils [req-955ef3e3-4bdc-4882-b9eb-6c68a2cf9e54 req-1511f20c-5041-4780-9fe7-b68c0701d877 service nova] Lock "ca3d1a73-6f3b-4278-8fe7-03b66f407ba6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.330640] env[62684]: DEBUG nova.compute.manager [req-955ef3e3-4bdc-4882-b9eb-6c68a2cf9e54 req-1511f20c-5041-4780-9fe7-b68c0701d877 service nova] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] No waiting events found dispatching network-vif-plugged-1ebe50b2-d6ab-48aa-b581-d2d09b588552 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1899.330812] env[62684]: WARNING nova.compute.manager [req-955ef3e3-4bdc-4882-b9eb-6c68a2cf9e54 req-1511f20c-5041-4780-9fe7-b68c0701d877 service nova] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] 
Received unexpected event network-vif-plugged-1ebe50b2-d6ab-48aa-b581-d2d09b588552 for instance with vm_state building and task_state spawning. [ 1899.418296] env[62684]: DEBUG nova.network.neutron [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Successfully updated port: 1ebe50b2-d6ab-48aa-b581-d2d09b588552 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1899.656658] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.565s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.657300] env[62684]: DEBUG nova.compute.manager [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1899.660056] env[62684]: DEBUG oslo_concurrency.lockutils [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.594s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.660291] env[62684]: DEBUG nova.objects.instance [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Lazy-loading 'resources' on Instance uuid a9dfeb4d-a92e-41cf-9d2f-43086cc9e868 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1899.731362] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Creating Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1899.732020] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b545de1e-ea96-439d-ac6f-fd51151eb8dd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.740218] env[62684]: DEBUG oslo_vmware.api [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 1899.740218] env[62684]: value = "task-2052795" [ 1899.740218] env[62684]: _type = "Task" [ 1899.740218] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.748470] env[62684]: DEBUG oslo_vmware.api [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2052795, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.768459] env[62684]: DEBUG oslo_vmware.api [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Task: {'id': task-2052794, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.921054] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "refresh_cache-ca3d1a73-6f3b-4278-8fe7-03b66f407ba6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1899.921280] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquired lock "refresh_cache-ca3d1a73-6f3b-4278-8fe7-03b66f407ba6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1899.921448] env[62684]: DEBUG nova.network.neutron [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1900.163294] env[62684]: DEBUG nova.compute.utils [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1900.164880] env[62684]: DEBUG nova.compute.manager [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1900.165072] env[62684]: DEBUG nova.network.neutron [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1900.222961] env[62684]: DEBUG nova.policy [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6cc355a545ee470d8082f0a96dafe513', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '147d85277da2482db0c24803c664cb93', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1900.250231] env[62684]: DEBUG oslo_vmware.api [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2052795, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.272462] env[62684]: DEBUG oslo_vmware.api [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Task: {'id': task-2052794, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.456758} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.272462] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 26303c0e-be87-41ff-a15c-e92f91f8a05f/26303c0e-be87-41ff-a15c-e92f91f8a05f.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1900.273195] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1900.273195] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c06e3c30-3d46-4ce8-af9b-5e18d4890dbf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.280971] env[62684]: DEBUG oslo_vmware.api [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Waiting for the task: (returnval){ [ 1900.280971] env[62684]: value = "task-2052796" [ 1900.280971] env[62684]: _type = "Task" [ 1900.280971] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.290020] env[62684]: DEBUG oslo_vmware.api [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Task: {'id': task-2052796, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.458987] env[62684]: DEBUG nova.network.neutron [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1900.503915] env[62684]: DEBUG nova.network.neutron [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Successfully created port: 39c750a6-1076-4354-bc30-d7f50ca821b5 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1900.649541] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ceca812-0072-402e-b05e-45f2adb09304 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.658053] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc04ad49-229d-4343-84a7-0f0ef730b516 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.693811] env[62684]: DEBUG nova.compute.manager [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1900.698401] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aa98751-8a26-4b63-9503-699d0638826a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.707417] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dff4224-5f89-4019-90b7-bf72af4d9ae4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.722072] env[62684]: DEBUG nova.compute.provider_tree [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1900.751586] env[62684]: DEBUG oslo_vmware.api [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2052795, 'name': CreateSnapshot_Task, 'duration_secs': 0.99069} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.752269] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Created Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1900.753346] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77bdbc6d-fd6f-42b8-9756-7fa9c12829dc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.790213] env[62684]: DEBUG oslo_vmware.api [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Task: {'id': task-2052796, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07067} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.791176] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1900.791958] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f45199fd-206c-4a51-a8b7-940bc2da6ede {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.819582] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Reconfiguring VM instance instance-0000002e to attach disk [datastore2] 26303c0e-be87-41ff-a15c-e92f91f8a05f/26303c0e-be87-41ff-a15c-e92f91f8a05f.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1900.820214] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46bd457b-adea-4c7f-8116-465cbc26a85a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.841842] env[62684]: DEBUG oslo_vmware.api [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Waiting for the task: (returnval){ [ 1900.841842] env[62684]: value = "task-2052797" [ 1900.841842] env[62684]: _type = "Task" [ 1900.841842] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.852038] env[62684]: DEBUG oslo_vmware.api [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Task: {'id': task-2052797, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.949812] env[62684]: DEBUG nova.network.neutron [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Updating instance_info_cache with network_info: [{"id": "1ebe50b2-d6ab-48aa-b581-d2d09b588552", "address": "fa:16:3e:42:bf:40", "network": {"id": "bd253713-4e81-4c94-9689-22b81e7f51b6", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-307001665-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd812751722143fabedfa986a2d98b59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ebe50b2-d6", "ovs_interfaceid": "1ebe50b2-d6ab-48aa-b581-d2d09b588552", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1901.226062] env[62684]: DEBUG nova.scheduler.client.report [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1901.271905] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Creating linked-clone VM from snapshot {{(pid=62684) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1901.272275] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-bb5e9814-5d21-4c7d-9f8e-e75a99d6e5a8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.281896] env[62684]: DEBUG oslo_vmware.api [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 1901.281896] env[62684]: value = "task-2052798" [ 1901.281896] env[62684]: _type = "Task" [ 1901.281896] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.291269] env[62684]: DEBUG oslo_vmware.api [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2052798, 'name': CloneVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.353109] env[62684]: DEBUG oslo_vmware.api [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Task: {'id': task-2052797, 'name': ReconfigVM_Task, 'duration_secs': 0.283477} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.353409] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Reconfigured VM instance instance-0000002e to attach disk [datastore2] 26303c0e-be87-41ff-a15c-e92f91f8a05f/26303c0e-be87-41ff-a15c-e92f91f8a05f.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1901.354312] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ffe01264-7728-42ce-9dda-3c73ea6d55b7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.360944] env[62684]: DEBUG oslo_vmware.api [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Waiting for the task: (returnval){ [ 1901.360944] env[62684]: value = "task-2052799" [ 1901.360944] env[62684]: _type = "Task" [ 1901.360944] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.369554] env[62684]: DEBUG oslo_vmware.api [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Task: {'id': task-2052799, 'name': Rename_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.452626] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Releasing lock "refresh_cache-ca3d1a73-6f3b-4278-8fe7-03b66f407ba6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1901.453162] env[62684]: DEBUG nova.compute.manager [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Instance network_info: |[{"id": "1ebe50b2-d6ab-48aa-b581-d2d09b588552", "address": "fa:16:3e:42:bf:40", "network": {"id": "bd253713-4e81-4c94-9689-22b81e7f51b6", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-307001665-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd812751722143fabedfa986a2d98b59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ebe50b2-d6", "ovs_interfaceid": "1ebe50b2-d6ab-48aa-b581-d2d09b588552", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1901.454049] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:bf:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4055505f-97ab-400b-969c-43d99b38fd48', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1ebe50b2-d6ab-48aa-b581-d2d09b588552', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1901.467181] env[62684]: DEBUG oslo.service.loopingcall [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1901.471100] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1901.471100] env[62684]: DEBUG nova.compute.manager [req-a0d117ab-8237-4764-8972-9c1709afdfa1 req-b16ef859-ff47-4ec3-a3d9-46cf0b4f6dc5 service nova] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Received event network-changed-1ebe50b2-d6ab-48aa-b581-d2d09b588552 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1901.471100] env[62684]: DEBUG nova.compute.manager [req-a0d117ab-8237-4764-8972-9c1709afdfa1 req-b16ef859-ff47-4ec3-a3d9-46cf0b4f6dc5 service nova] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Refreshing instance network info cache due to event network-changed-1ebe50b2-d6ab-48aa-b581-d2d09b588552. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1901.471453] env[62684]: DEBUG oslo_concurrency.lockutils [req-a0d117ab-8237-4764-8972-9c1709afdfa1 req-b16ef859-ff47-4ec3-a3d9-46cf0b4f6dc5 service nova] Acquiring lock "refresh_cache-ca3d1a73-6f3b-4278-8fe7-03b66f407ba6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1901.471588] env[62684]: DEBUG oslo_concurrency.lockutils [req-a0d117ab-8237-4764-8972-9c1709afdfa1 req-b16ef859-ff47-4ec3-a3d9-46cf0b4f6dc5 service nova] Acquired lock "refresh_cache-ca3d1a73-6f3b-4278-8fe7-03b66f407ba6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1901.471751] env[62684]: DEBUG nova.network.neutron [req-a0d117ab-8237-4764-8972-9c1709afdfa1 req-b16ef859-ff47-4ec3-a3d9-46cf0b4f6dc5 service nova] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Refreshing network info cache for port 1ebe50b2-d6ab-48aa-b581-d2d09b588552 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1901.473333] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ffd66f61-f77e-4343-bc25-0a0903436e0e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.497619] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1901.497619] env[62684]: value = "task-2052800" [ 1901.497619] env[62684]: _type = "Task" [ 1901.497619] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.505996] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052800, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.713012] env[62684]: DEBUG nova.compute.manager [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1901.732331] env[62684]: DEBUG oslo_concurrency.lockutils [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.072s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1901.737258] env[62684]: DEBUG oslo_concurrency.lockutils [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 37.111s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1901.737472] env[62684]: DEBUG nova.objects.instance [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62684) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1901.747506] env[62684]: DEBUG nova.virt.hardware [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:47:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='d1dcf74d-6a75-42cb-977e-e0fc87b2d673',id=39,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1732807329',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1901.747759] env[62684]: DEBUG nova.virt.hardware [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1901.747925] env[62684]: DEBUG nova.virt.hardware [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1901.748133] env[62684]: DEBUG nova.virt.hardware [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1901.748290] env[62684]: DEBUG nova.virt.hardware [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 
tempest-MigrationsAdminTest-991359366-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1901.748448] env[62684]: DEBUG nova.virt.hardware [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1901.748670] env[62684]: DEBUG nova.virt.hardware [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1901.749017] env[62684]: DEBUG nova.virt.hardware [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1901.749262] env[62684]: DEBUG nova.virt.hardware [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1901.749448] env[62684]: DEBUG nova.virt.hardware [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1901.749634] env[62684]: DEBUG nova.virt.hardware [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1901.751433] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d506a7bf-6d7b-4d06-babf-83cbbf606ac5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.759425] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea2635db-a556-4a4f-ba50-7b28e7fca4e9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.763872] env[62684]: INFO nova.scheduler.client.report [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Deleted allocations for instance a9dfeb4d-a92e-41cf-9d2f-43086cc9e868 [ 1901.791096] env[62684]: DEBUG oslo_vmware.api [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2052798, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.871214] env[62684]: DEBUG oslo_vmware.api [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Task: {'id': task-2052799, 'name': Rename_Task, 'duration_secs': 0.364665} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.871584] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1901.871896] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-70bc03a0-fd86-4c54-bec3-244f4a5c2fec {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.878730] env[62684]: DEBUG oslo_vmware.api [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Waiting for the task: (returnval){ [ 1901.878730] env[62684]: value = "task-2052801" [ 1901.878730] env[62684]: _type = "Task" [ 1901.878730] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.888798] env[62684]: DEBUG oslo_vmware.api [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Task: {'id': task-2052801, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.007512] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052800, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.090725] env[62684]: DEBUG nova.network.neutron [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Successfully updated port: 39c750a6-1076-4354-bc30-d7f50ca821b5 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1902.273967] env[62684]: DEBUG oslo_concurrency.lockutils [None req-abdd27a4-e762-4e3b-91f4-420d0975ed68 tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Lock "a9dfeb4d-a92e-41cf-9d2f-43086cc9e868" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.845s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1902.295888] env[62684]: DEBUG oslo_vmware.api [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2052798, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.356567] env[62684]: DEBUG nova.network.neutron [req-a0d117ab-8237-4764-8972-9c1709afdfa1 req-b16ef859-ff47-4ec3-a3d9-46cf0b4f6dc5 service nova] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Updated VIF entry in instance network info cache for port 1ebe50b2-d6ab-48aa-b581-d2d09b588552. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1902.356567] env[62684]: DEBUG nova.network.neutron [req-a0d117ab-8237-4764-8972-9c1709afdfa1 req-b16ef859-ff47-4ec3-a3d9-46cf0b4f6dc5 service nova] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Updating instance_info_cache with network_info: [{"id": "1ebe50b2-d6ab-48aa-b581-d2d09b588552", "address": "fa:16:3e:42:bf:40", "network": {"id": "bd253713-4e81-4c94-9689-22b81e7f51b6", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-307001665-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd812751722143fabedfa986a2d98b59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ebe50b2-d6", "ovs_interfaceid": "1ebe50b2-d6ab-48aa-b581-d2d09b588552", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1902.391476] env[62684]: DEBUG oslo_vmware.api [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Task: {'id': task-2052801, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.508846] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052800, 'name': CreateVM_Task, 'duration_secs': 0.756747} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.509248] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1902.509774] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1902.510338] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1902.510338] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1902.510604] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6aa9a2c-66ac-48db-8581-ad9eb0efe3e0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.517755] env[62684]: DEBUG oslo_vmware.api [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1902.517755] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b4eb9a-e062-c132-38f0-1328ebb6e071" [ 1902.517755] env[62684]: _type = "Task" [ 1902.517755] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.527294] env[62684]: DEBUG oslo_vmware.api [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b4eb9a-e062-c132-38f0-1328ebb6e071, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.595474] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "refresh_cache-02dc8c41-5092-4f84-9722-37d4df3a459a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1902.595796] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquired lock "refresh_cache-02dc8c41-5092-4f84-9722-37d4df3a459a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1902.596119] env[62684]: DEBUG nova.network.neutron [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1902.749927] env[62684]: DEBUG oslo_concurrency.lockutils [None req-faec8774-dd2c-4d6d-a7e5-2dec9220500b tempest-ServersAdmin275Test-1208337468 tempest-ServersAdmin275Test-1208337468-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1902.751107] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.558s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1902.752055] env[62684]: DEBUG nova.objects.instance [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Lazy-loading 'resources' on Instance uuid e2a9ab56-bde3-40b6-a214-19c77a9c6778 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1902.793311] env[62684]: DEBUG oslo_vmware.api [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2052798, 'name': CloneVM_Task, 'duration_secs': 1.406672} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.793605] env[62684]: INFO nova.virt.vmwareapi.vmops [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Created linked-clone VM from snapshot [ 1902.794479] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c57ff9-6cc8-478e-aac7-34770aeac9cf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.805978] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Uploading image 2116318e-3b46-4c4b-83f6-3ab3a26c5100 {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1902.836985] env[62684]: DEBUG oslo_vmware.rw_handles [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1902.836985] env[62684]: value = "vm-421260" [ 1902.836985] env[62684]: _type = "VirtualMachine" [ 1902.836985] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1902.837310] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-768d4fae-5dda-434e-808b-974cb9838af6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.845255] env[62684]: DEBUG oslo_vmware.rw_handles [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lease: (returnval){ [ 1902.845255] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52505f5a-3aff-6746-bd88-72f55532c930" [ 1902.845255] env[62684]: _type = "HttpNfcLease" [ 1902.845255] env[62684]: } obtained for exporting VM: (result){ [ 1902.845255] env[62684]: value = "vm-421260" [ 1902.845255] env[62684]: _type = "VirtualMachine" [ 1902.845255] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1902.845522] env[62684]: DEBUG oslo_vmware.api [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the lease: (returnval){ [ 1902.845522] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52505f5a-3aff-6746-bd88-72f55532c930" [ 1902.845522] env[62684]: _type = "HttpNfcLease" [ 1902.845522] env[62684]: } to be ready. {{(pid=62684) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1902.851830] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1902.851830] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52505f5a-3aff-6746-bd88-72f55532c930" [ 1902.851830] env[62684]: _type = "HttpNfcLease" [ 1902.851830] env[62684]: } is initializing. 
{{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1902.863465] env[62684]: DEBUG oslo_concurrency.lockutils [req-a0d117ab-8237-4764-8972-9c1709afdfa1 req-b16ef859-ff47-4ec3-a3d9-46cf0b4f6dc5 service nova] Releasing lock "refresh_cache-ca3d1a73-6f3b-4278-8fe7-03b66f407ba6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1902.863938] env[62684]: DEBUG oslo_concurrency.lockutils [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Acquiring lock "ab2c7cbe-6f46-4174-bffb-055a15f2d56b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1902.864172] env[62684]: DEBUG oslo_concurrency.lockutils [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Lock "ab2c7cbe-6f46-4174-bffb-055a15f2d56b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1902.864379] env[62684]: DEBUG oslo_concurrency.lockutils [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Acquiring lock "ab2c7cbe-6f46-4174-bffb-055a15f2d56b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1902.864570] env[62684]: DEBUG oslo_concurrency.lockutils [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Lock "ab2c7cbe-6f46-4174-bffb-055a15f2d56b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1902.864739] env[62684]: DEBUG oslo_concurrency.lockutils [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Lock "ab2c7cbe-6f46-4174-bffb-055a15f2d56b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1902.866979] env[62684]: INFO nova.compute.manager [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Terminating instance [ 1902.868838] env[62684]: DEBUG nova.compute.manager [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1902.869050] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1902.869872] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d47e1c9-4451-436a-ac4c-e2d5968a20cd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.877623] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1902.877927] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac05e361-2fb3-4a6e-a140-971a07e9aebe {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.889723] env[62684]: DEBUG oslo_vmware.api [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Task: {'id': task-2052801, 'name': PowerOnVM_Task, 'duration_secs': 0.582232} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.890790] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1902.891008] env[62684]: INFO nova.compute.manager [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Took 8.47 seconds to spawn the instance on the hypervisor. [ 1902.891239] env[62684]: DEBUG nova.compute.manager [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1902.891594] env[62684]: DEBUG oslo_vmware.api [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1902.891594] env[62684]: value = "task-2052803" [ 1902.891594] env[62684]: _type = "Task" [ 1902.891594] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.892430] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fbca65f-83d1-4308-9df8-2cf77d569c44 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.028756] env[62684]: DEBUG oslo_vmware.api [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b4eb9a-e062-c132-38f0-1328ebb6e071, 'name': SearchDatastore_Task, 'duration_secs': 0.014201} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.029235] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1903.029564] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1903.029876] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1903.030127] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1903.030395] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1903.030723] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-00eabb93-52c1-4d59-aec3-0732a5c2f7ef {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.045390] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1903.045594] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1903.046709] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fe3c4da-54a5-4ca8-a282-8e0e118aeb25 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.052289] env[62684]: DEBUG oslo_vmware.api [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1903.052289] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523d8104-b4b4-c8f9-25cf-165118fa4726" [ 1903.052289] env[62684]: _type = "Task" [ 1903.052289] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.060319] env[62684]: DEBUG oslo_vmware.api [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523d8104-b4b4-c8f9-25cf-165118fa4726, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.145440] env[62684]: DEBUG nova.network.neutron [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1903.356166] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1903.356166] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52505f5a-3aff-6746-bd88-72f55532c930" [ 1903.356166] env[62684]: _type = "HttpNfcLease" [ 1903.356166] env[62684]: } is ready. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1903.356469] env[62684]: DEBUG oslo_vmware.rw_handles [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1903.356469] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52505f5a-3aff-6746-bd88-72f55532c930" [ 1903.356469] env[62684]: _type = "HttpNfcLease" [ 1903.356469] env[62684]: }. 
{{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1903.357247] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddb3b8e3-836b-4389-b4e3-54980ee36fa8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.366678] env[62684]: DEBUG oslo_vmware.rw_handles [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5261f706-bba3-259e-65d3-c65c0d7be25a/disk-0.vmdk from lease info. {{(pid=62684) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1903.366818] env[62684]: DEBUG oslo_vmware.rw_handles [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5261f706-bba3-259e-65d3-c65c0d7be25a/disk-0.vmdk for reading. {{(pid=62684) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1903.435490] env[62684]: DEBUG nova.network.neutron [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Updating instance_info_cache with network_info: [{"id": "39c750a6-1076-4354-bc30-d7f50ca821b5", "address": "fa:16:3e:16:fe:89", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.34", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39c750a6-10", "ovs_interfaceid": "39c750a6-1076-4354-bc30-d7f50ca821b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1903.444434] env[62684]: INFO nova.compute.manager [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Took 44.78 seconds to build instance. [ 1903.451237] env[62684]: DEBUG oslo_vmware.api [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052803, 'name': PowerOffVM_Task, 'duration_secs': 0.210609} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.451647] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1903.451834] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1903.452142] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1f804d20-64ad-49c2-bddc-0da49aca3673 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.546085] env[62684]: DEBUG nova.compute.manager [req-45c8caf1-af2c-486d-b4de-fc4b2b74de47 req-c1374d4b-a7a1-43c3-83cc-ae58e534e4fe service nova] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Received event network-vif-plugged-39c750a6-1076-4354-bc30-d7f50ca821b5 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1903.546329] env[62684]: DEBUG oslo_concurrency.lockutils [req-45c8caf1-af2c-486d-b4de-fc4b2b74de47 req-c1374d4b-a7a1-43c3-83cc-ae58e534e4fe service nova] Acquiring lock "02dc8c41-5092-4f84-9722-37d4df3a459a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1903.546491] env[62684]: DEBUG oslo_concurrency.lockutils [req-45c8caf1-af2c-486d-b4de-fc4b2b74de47 req-c1374d4b-a7a1-43c3-83cc-ae58e534e4fe service nova] Lock "02dc8c41-5092-4f84-9722-37d4df3a459a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1903.546663] env[62684]: DEBUG oslo_concurrency.lockutils [req-45c8caf1-af2c-486d-b4de-fc4b2b74de47 req-c1374d4b-a7a1-43c3-83cc-ae58e534e4fe service nova] Lock "02dc8c41-5092-4f84-9722-37d4df3a459a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1903.546861] env[62684]: DEBUG nova.compute.manager [req-45c8caf1-af2c-486d-b4de-fc4b2b74de47 req-c1374d4b-a7a1-43c3-83cc-ae58e534e4fe service nova] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] No waiting events found dispatching network-vif-plugged-39c750a6-1076-4354-bc30-d7f50ca821b5 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1903.547048] env[62684]: WARNING nova.compute.manager [req-45c8caf1-af2c-486d-b4de-fc4b2b74de47 req-c1374d4b-a7a1-43c3-83cc-ae58e534e4fe service nova] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Received unexpected event network-vif-plugged-39c750a6-1076-4354-bc30-d7f50ca821b5 for instance with vm_state building and task_state spawning. 
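Aside for readers following this trace: the repeated "Invoking <Method>_Task", "Waiting for the task", "progress is N%" and "completed successfully" records above all come from the same invoke-and-poll pattern in oslo.vmware. The sketch below is illustrative only and is not part of this log; the vCenter host, credentials, and the managed object ID are placeholders (the moref style matches values such as "vm-421260" seen earlier, but no real value from this environment is implied), and keyword defaults are assumed from oslo.vmware's public API.

```python
# Minimal sketch, assuming placeholder vCenter endpoint and credentials.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vcenter.example.test',           # placeholder host, not from this log
    'administrator@vsphere.local',    # placeholder username
    'secret',                         # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5)           # interval between the progress polls seen above

# Build a managed object reference for a VM (placeholder ID).
vm_ref = vim_util.get_moref('vm-00000', 'VirtualMachine')

# invoke_api() issues the SOAP call (the "Invoking VirtualMachine.PowerOffVM_Task"
# style records); wait_for_task() polls the returned Task until it reports
# success or raises on error (the "progress is N%" / "completed successfully" records).
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task)
```

The same pattern underlies the CreateVM_Task, CloneVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task and DeleteDatastoreFile_Task records in this section; only the invoked method and its arguments differ.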
[ 1903.547217] env[62684]: DEBUG nova.compute.manager [req-45c8caf1-af2c-486d-b4de-fc4b2b74de47 req-c1374d4b-a7a1-43c3-83cc-ae58e534e4fe service nova] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Received event network-changed-39c750a6-1076-4354-bc30-d7f50ca821b5 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1903.547378] env[62684]: DEBUG nova.compute.manager [req-45c8caf1-af2c-486d-b4de-fc4b2b74de47 req-c1374d4b-a7a1-43c3-83cc-ae58e534e4fe service nova] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Refreshing instance network info cache due to event network-changed-39c750a6-1076-4354-bc30-d7f50ca821b5. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1903.547584] env[62684]: DEBUG oslo_concurrency.lockutils [req-45c8caf1-af2c-486d-b4de-fc4b2b74de47 req-c1374d4b-a7a1-43c3-83cc-ae58e534e4fe service nova] Acquiring lock "refresh_cache-02dc8c41-5092-4f84-9722-37d4df3a459a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1903.567137] env[62684]: DEBUG oslo_vmware.api [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523d8104-b4b4-c8f9-25cf-165118fa4726, 'name': SearchDatastore_Task, 'duration_secs': 0.022318} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.567137] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20b198ef-a779-4c3a-ba15-98510d10b01f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.572184] env[62684]: DEBUG oslo_vmware.api [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1903.572184] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52728c76-0f7b-cb19-bbe7-51cd65cd77f3" [ 1903.572184] env[62684]: _type = "Task" [ 1903.572184] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.582254] env[62684]: DEBUG oslo_vmware.api [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52728c76-0f7b-cb19-bbe7-51cd65cd77f3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.587854] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4970fdfe-5398-4a6b-9f36-e85490ce2173 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.858398] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e50da10a-b945-4494-b5ad-15c3a06f1a9e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.866485] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afe19a09-dc55-41f3-bef8-300c620ab749 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.900346] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e335af5e-bb20-4dc4-9f28-501aedb6566b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.908235] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f831ff-4d1f-401b-b698-163406e473b7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.922335] env[62684]: DEBUG nova.compute.provider_tree [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1903.945456] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Releasing lock "refresh_cache-02dc8c41-5092-4f84-9722-37d4df3a459a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1903.945853] env[62684]: DEBUG nova.compute.manager [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Instance network_info: |[{"id": "39c750a6-1076-4354-bc30-d7f50ca821b5", "address": "fa:16:3e:16:fe:89", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.34", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39c750a6-10", "ovs_interfaceid": "39c750a6-1076-4354-bc30-d7f50ca821b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1903.946224] env[62684]: DEBUG oslo_concurrency.lockutils [req-45c8caf1-af2c-486d-b4de-fc4b2b74de47 req-c1374d4b-a7a1-43c3-83cc-ae58e534e4fe service nova] Acquired lock "refresh_cache-02dc8c41-5092-4f84-9722-37d4df3a459a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1903.946418] env[62684]: DEBUG nova.network.neutron [req-45c8caf1-af2c-486d-b4de-fc4b2b74de47 req-c1374d4b-a7a1-43c3-83cc-ae58e534e4fe service nova] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Refreshing network info cache for port 39c750a6-1076-4354-bc30-d7f50ca821b5 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1903.947907] env[62684]: DEBUG oslo_concurrency.lockutils [None req-32636e91-249f-48ec-901c-e980815930c9 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Lock "26303c0e-be87-41ff-a15c-e92f91f8a05f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.486s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1903.948757] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:fe:89', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ded18042-834c-4792-b3e8-b1c377446432', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '39c750a6-1076-4354-bc30-d7f50ca821b5', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1903.957422] env[62684]: DEBUG oslo.service.loopingcall [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1903.960538] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1903.961141] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9f79085c-2d3a-4d80-bffb-d26160fea2cf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.985916] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1903.985916] env[62684]: value = "task-2052805" [ 1903.985916] env[62684]: _type = "Task" [ 1903.985916] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.995383] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052805, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.085975] env[62684]: DEBUG oslo_vmware.api [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52728c76-0f7b-cb19-bbe7-51cd65cd77f3, 'name': SearchDatastore_Task, 'duration_secs': 0.022909} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.086330] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1904.087096] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] ca3d1a73-6f3b-4278-8fe7-03b66f407ba6/ca3d1a73-6f3b-4278-8fe7-03b66f407ba6.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1904.087096] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6d8ab4f9-520b-459a-ae46-29a3655c3b70 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.094138] env[62684]: DEBUG oslo_vmware.api [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1904.094138] env[62684]: value = "task-2052806" [ 1904.094138] env[62684]: _type = "Task" [ 1904.094138] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.103225] env[62684]: DEBUG oslo_vmware.api [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052806, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.259141] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1904.259735] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1904.260133] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Deleting the datastore file [datastore1] ab2c7cbe-6f46-4174-bffb-055a15f2d56b {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1904.260634] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-40b41160-6db4-48f7-96d7-fb72e561ca69 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.270303] env[62684]: DEBUG oslo_vmware.api [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for the task: (returnval){ [ 1904.270303] env[62684]: value = "task-2052807" [ 1904.270303] env[62684]: _type = "Task" [ 1904.270303] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.280529] env[62684]: DEBUG oslo_vmware.api [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052807, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.426301] env[62684]: DEBUG nova.scheduler.client.report [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1904.463491] env[62684]: DEBUG nova.compute.manager [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1904.497219] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052805, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.607931] env[62684]: DEBUG oslo_vmware.api [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052806, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.780280] env[62684]: DEBUG oslo_vmware.api [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Task: {'id': task-2052807, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.388967} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.780970] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1904.781262] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1904.781506] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1904.781778] env[62684]: INFO nova.compute.manager [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Took 1.91 seconds to destroy the instance on the hypervisor. [ 1904.782107] env[62684]: DEBUG oslo.service.loopingcall [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1904.782406] env[62684]: DEBUG nova.compute.manager [-] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1904.782536] env[62684]: DEBUG nova.network.neutron [-] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1904.810536] env[62684]: DEBUG nova.network.neutron [req-45c8caf1-af2c-486d-b4de-fc4b2b74de47 req-c1374d4b-a7a1-43c3-83cc-ae58e534e4fe service nova] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Updated VIF entry in instance network info cache for port 39c750a6-1076-4354-bc30-d7f50ca821b5. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1904.810921] env[62684]: DEBUG nova.network.neutron [req-45c8caf1-af2c-486d-b4de-fc4b2b74de47 req-c1374d4b-a7a1-43c3-83cc-ae58e534e4fe service nova] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Updating instance_info_cache with network_info: [{"id": "39c750a6-1076-4354-bc30-d7f50ca821b5", "address": "fa:16:3e:16:fe:89", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.34", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39c750a6-10", "ovs_interfaceid": "39c750a6-1076-4354-bc30-d7f50ca821b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1904.932990] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.182s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1904.937577] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.634s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1904.940277] env[62684]: INFO nova.compute.claims [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1904.961921] env[62684]: INFO nova.scheduler.client.report [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Deleted allocations for instance e2a9ab56-bde3-40b6-a214-19c77a9c6778 [ 1904.994024] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1905.001251] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052805, 'name': CreateVM_Task, 'duration_secs': 0.512071} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.001556] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1905.002657] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1905.003077] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1905.003497] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1905.003933] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a46133f-9705-48ea-bdae-5fb0509edf36 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.009935] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 1905.009935] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52741ef1-f1bd-3a85-b64b-2d85d1ff3c4a" [ 1905.009935] env[62684]: _type = "Task" [ 1905.009935] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.021632] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52741ef1-f1bd-3a85-b64b-2d85d1ff3c4a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.106599] env[62684]: DEBUG oslo_vmware.api [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052806, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514441} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.107011] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] ca3d1a73-6f3b-4278-8fe7-03b66f407ba6/ca3d1a73-6f3b-4278-8fe7-03b66f407ba6.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1905.107867] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1905.108241] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2692a1e9-7dec-4c9e-af39-d93537fddbf9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.116105] env[62684]: DEBUG oslo_vmware.api [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1905.116105] env[62684]: value = "task-2052808" [ 1905.116105] env[62684]: _type = "Task" [ 1905.116105] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.124756] env[62684]: DEBUG oslo_vmware.api [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052808, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.314660] env[62684]: DEBUG oslo_concurrency.lockutils [req-45c8caf1-af2c-486d-b4de-fc4b2b74de47 req-c1374d4b-a7a1-43c3-83cc-ae58e534e4fe service nova] Releasing lock "refresh_cache-02dc8c41-5092-4f84-9722-37d4df3a459a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1905.473277] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b0765ddf-11d5-45af-89c3-7acdd89d5756 tempest-ServersAdmin275Test-734925456 tempest-ServersAdmin275Test-734925456-project-member] Lock "e2a9ab56-bde3-40b6-a214-19c77a9c6778" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.996s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1905.525981] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52741ef1-f1bd-3a85-b64b-2d85d1ff3c4a, 'name': SearchDatastore_Task, 'duration_secs': 0.014813} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.526600] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1905.526881] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1905.527157] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1905.527406] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1905.527765] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1905.528211] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-014cc289-4d68-4c46-929a-61484bff50fa {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.539930] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1905.540392] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1905.542378] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9167766c-51fd-4e88-8081-13f0d9ce15ff {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.548781] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 1905.548781] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52007a52-9aa1-8698-25d2-f81b9bf90893" [ 1905.548781] env[62684]: _type = "Task" [ 1905.548781] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.561627] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52007a52-9aa1-8698-25d2-f81b9bf90893, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.588509] env[62684]: DEBUG nova.network.neutron [-] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1905.592573] env[62684]: DEBUG nova.compute.manager [req-8ec2d06e-3ad8-409f-8bf6-b19a5603478f req-30f7fb53-2c6e-4a68-81e6-23e6f511a12d service nova] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Received event network-changed-d011162f-0ef0-4133-ac1b-f7ed8a3a8a3c {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1905.595166] env[62684]: DEBUG nova.compute.manager [req-8ec2d06e-3ad8-409f-8bf6-b19a5603478f req-30f7fb53-2c6e-4a68-81e6-23e6f511a12d service nova] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Refreshing instance network info cache due to event network-changed-d011162f-0ef0-4133-ac1b-f7ed8a3a8a3c. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1905.595166] env[62684]: DEBUG oslo_concurrency.lockutils [req-8ec2d06e-3ad8-409f-8bf6-b19a5603478f req-30f7fb53-2c6e-4a68-81e6-23e6f511a12d service nova] Acquiring lock "refresh_cache-26303c0e-be87-41ff-a15c-e92f91f8a05f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1905.595166] env[62684]: DEBUG oslo_concurrency.lockutils [req-8ec2d06e-3ad8-409f-8bf6-b19a5603478f req-30f7fb53-2c6e-4a68-81e6-23e6f511a12d service nova] Acquired lock "refresh_cache-26303c0e-be87-41ff-a15c-e92f91f8a05f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1905.595166] env[62684]: DEBUG nova.network.neutron [req-8ec2d06e-3ad8-409f-8bf6-b19a5603478f req-30f7fb53-2c6e-4a68-81e6-23e6f511a12d service nova] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Refreshing network info cache for port d011162f-0ef0-4133-ac1b-f7ed8a3a8a3c {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1905.627806] env[62684]: DEBUG oslo_vmware.api [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052808, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084461} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.628101] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1905.628874] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4384f97b-0ee4-4216-ae2e-26c7d054dc81 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.651220] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] ca3d1a73-6f3b-4278-8fe7-03b66f407ba6/ca3d1a73-6f3b-4278-8fe7-03b66f407ba6.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1905.651971] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6028fbc1-06ba-4efe-bfc0-f90c62785ede {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.672179] env[62684]: DEBUG oslo_vmware.api [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1905.672179] env[62684]: value = "task-2052809" [ 1905.672179] env[62684]: _type = "Task" [ 1905.672179] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.680738] env[62684]: DEBUG oslo_vmware.api [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052809, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.061636] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52007a52-9aa1-8698-25d2-f81b9bf90893, 'name': SearchDatastore_Task, 'duration_secs': 0.02097} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.065839] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c25a6c6f-c345-4b30-93c4-a5e5d78f0a67 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.071699] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 1906.071699] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52fce155-1e5e-5ae4-6c5a-7e99d528ddd5" [ 1906.071699] env[62684]: _type = "Task" [ 1906.071699] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.083045] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52fce155-1e5e-5ae4-6c5a-7e99d528ddd5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.093823] env[62684]: INFO nova.compute.manager [-] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Took 1.31 seconds to deallocate network for instance. [ 1906.183531] env[62684]: DEBUG oslo_vmware.api [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052809, 'name': ReconfigVM_Task, 'duration_secs': 0.286576} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.187077] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Reconfigured VM instance instance-0000002f to attach disk [datastore2] ca3d1a73-6f3b-4278-8fe7-03b66f407ba6/ca3d1a73-6f3b-4278-8fe7-03b66f407ba6.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1906.188075] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5e2a8384-aa23-4f1d-a772-400f51f0f91b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.198767] env[62684]: DEBUG oslo_vmware.api [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1906.198767] env[62684]: value = "task-2052810" [ 1906.198767] env[62684]: _type = "Task" [ 1906.198767] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.211214] env[62684]: DEBUG oslo_vmware.api [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052810, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.373947] env[62684]: DEBUG nova.network.neutron [req-8ec2d06e-3ad8-409f-8bf6-b19a5603478f req-30f7fb53-2c6e-4a68-81e6-23e6f511a12d service nova] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Updated VIF entry in instance network info cache for port d011162f-0ef0-4133-ac1b-f7ed8a3a8a3c. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1906.374207] env[62684]: DEBUG nova.network.neutron [req-8ec2d06e-3ad8-409f-8bf6-b19a5603478f req-30f7fb53-2c6e-4a68-81e6-23e6f511a12d service nova] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Updating instance_info_cache with network_info: [{"id": "d011162f-0ef0-4133-ac1b-f7ed8a3a8a3c", "address": "fa:16:3e:3c:35:95", "network": {"id": "5bdb9a67-50c7-45c7-b9c4-bef767f22efc", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-2129055498-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17c9f5881b06427e969a783fe44135d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd011162f-0e", "ovs_interfaceid": "d011162f-0ef0-4133-ac1b-f7ed8a3a8a3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1906.448606] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db126f7e-3923-42ac-983e-f9fd0cf9c9db {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.457497] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73ed6e1-295e-4910-83ba-1f335fa9fed2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.491708] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-625877c7-6f6c-4087-b6e0-44d099a41726 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.499460] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88fc9682-9431-42b3-95b4-ab8eb4f157d6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.513805] env[62684]: DEBUG nova.compute.provider_tree [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1906.583974] env[62684]: DEBUG 
oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52fce155-1e5e-5ae4-6c5a-7e99d528ddd5, 'name': SearchDatastore_Task, 'duration_secs': 0.013824} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.583974] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1906.585342] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 02dc8c41-5092-4f84-9722-37d4df3a459a/02dc8c41-5092-4f84-9722-37d4df3a459a.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1906.585886] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2cd55736-25cc-4a25-9225-39a2845e3d35 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.594007] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 1906.594007] env[62684]: value = "task-2052811" [ 1906.594007] env[62684]: _type = "Task" [ 1906.594007] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.603776] env[62684]: DEBUG oslo_concurrency.lockutils [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1906.607018] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052811, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.712741] env[62684]: DEBUG oslo_vmware.api [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052810, 'name': Rename_Task, 'duration_secs': 0.138727} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.716019] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1906.716019] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b7ec7d4c-7fc1-4bf1-8a38-e71323f1add1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.719976] env[62684]: DEBUG oslo_vmware.api [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1906.719976] env[62684]: value = "task-2052812" [ 1906.719976] env[62684]: _type = "Task" [ 1906.719976] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.728748] env[62684]: DEBUG oslo_vmware.api [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052812, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.877221] env[62684]: DEBUG oslo_concurrency.lockutils [req-8ec2d06e-3ad8-409f-8bf6-b19a5603478f req-30f7fb53-2c6e-4a68-81e6-23e6f511a12d service nova] Releasing lock "refresh_cache-26303c0e-be87-41ff-a15c-e92f91f8a05f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1906.877539] env[62684]: DEBUG nova.compute.manager [req-8ec2d06e-3ad8-409f-8bf6-b19a5603478f req-30f7fb53-2c6e-4a68-81e6-23e6f511a12d service nova] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Received event network-vif-deleted-19976c5d-9288-4b98-b988-e0f5d4e855e9 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1906.877727] env[62684]: INFO nova.compute.manager [req-8ec2d06e-3ad8-409f-8bf6-b19a5603478f req-30f7fb53-2c6e-4a68-81e6-23e6f511a12d service nova] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Neutron deleted interface 19976c5d-9288-4b98-b988-e0f5d4e855e9; detaching it from the instance and deleting it from the info cache [ 1906.877910] env[62684]: DEBUG nova.network.neutron [req-8ec2d06e-3ad8-409f-8bf6-b19a5603478f req-30f7fb53-2c6e-4a68-81e6-23e6f511a12d service nova] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1907.039527] env[62684]: ERROR nova.scheduler.client.report [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [req-476a45d1-fea7-484d-8a6b-caeb4abe7b9e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 
1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-476a45d1-fea7-484d-8a6b-caeb4abe7b9e"}]} [ 1907.063929] env[62684]: DEBUG nova.scheduler.client.report [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1907.080520] env[62684]: DEBUG nova.scheduler.client.report [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1907.080759] env[62684]: DEBUG nova.compute.provider_tree [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1907.099233] env[62684]: DEBUG nova.scheduler.client.report [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1907.107503] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052811, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.119255] env[62684]: DEBUG nova.scheduler.client.report [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1907.230825] env[62684]: DEBUG oslo_vmware.api [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052812, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.381212] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-13bc1bd6-0dfe-40c2-935a-b5dfd1e49f37 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.390591] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2acce925-0482-4eee-a0a3-c260706d2c87 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.431116] env[62684]: DEBUG nova.compute.manager [req-8ec2d06e-3ad8-409f-8bf6-b19a5603478f req-30f7fb53-2c6e-4a68-81e6-23e6f511a12d service nova] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Detach interface failed, port_id=19976c5d-9288-4b98-b988-e0f5d4e855e9, reason: Instance ab2c7cbe-6f46-4174-bffb-055a15f2d56b could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1907.607048] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052811, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.639122] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9381239-2486-4f74-b238-66c1bd946613 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.645156] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-162002f3-bf1f-4e42-9c64-e0bc0d5dcde2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.681622] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f30a3b06-0bf6-4bc1-ad44-776a7bc893cb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.689522] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b4d1415-b10b-45dd-a9b8-660305e22a54 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.704712] env[62684]: DEBUG nova.compute.provider_tree [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1907.729128] env[62684]: DEBUG oslo_vmware.api [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052812, 'name': PowerOnVM_Task, 'duration_secs': 0.670545} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.729409] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1907.729632] env[62684]: INFO nova.compute.manager [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Took 8.60 seconds to spawn the instance on the hypervisor. 
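Aside: the task records above (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, PowerOnVM_Task) all follow the same wait-then-poll shape — "Waiting for the task ... to complete", a series of "progress is N%" polls, then "completed successfully". The following is a minimal sketch of such a poll loop, not the oslo.vmware implementation; `get_task_info(task_ref)` and its `.state`/`.progress`/`.error` fields are hypothetical stand-ins for whatever task accessor is actually in use.

```python
# Illustrative sketch of the wait-then-poll pattern shown above; this is not
# the oslo.vmware implementation. `get_task_info(task_ref)` is a hypothetical
# accessor returning an object with .state ("running" | "success" | "error"),
# .progress (int) and .error (str or None).
import time


class TaskFailed(Exception):
    """Remote task finished in an error state."""


def wait_for_task(get_task_info, task_ref, poll_interval=0.5, timeout=300.0):
    """Block until the task succeeds, raising on failure or timeout."""
    deadline = time.monotonic() + timeout
    while True:
        info = get_task_info(task_ref)       # one poll per iteration
        if info.state == "success":
            return info                      # "... completed successfully."
        if info.state == "error":
            raise TaskFailed(info.error)
        if time.monotonic() > deadline:
            raise TimeoutError(f"{task_ref} still at {info.progress}% after {timeout}s")
        time.sleep(poll_interval)            # between the 0%, 66%, 100% progress lines
```

A fixed interval keeps the sketch short; a production loop would typically add jitter or backoff so many concurrent builds do not poll the vCenter endpoint in lockstep.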
[ 1907.729832] env[62684]: DEBUG nova.compute.manager [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1907.730631] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f66b697-9fde-41eb-a27e-9a4d6976e518 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.105643] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052811, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.249223] env[62684]: DEBUG nova.scheduler.client.report [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 75 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1908.249223] env[62684]: DEBUG nova.compute.provider_tree [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 75 to 76 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1908.249223] env[62684]: DEBUG nova.compute.provider_tree [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1908.260575] env[62684]: INFO nova.compute.manager [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Took 48.17 seconds to build instance. [ 1908.605431] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052811, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.682199} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.605704] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 02dc8c41-5092-4f84-9722-37d4df3a459a/02dc8c41-5092-4f84-9722-37d4df3a459a.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1908.605960] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1908.606248] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1426d07c-285e-4ff4-a68b-786c57e3e070 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.613108] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 1908.613108] env[62684]: value = "task-2052813" [ 1908.613108] env[62684]: _type = "Task" [ 1908.613108] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.621277] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052813, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.757658] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.819s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1908.757658] env[62684]: DEBUG nova.compute.manager [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1908.760451] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.574s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1908.760734] env[62684]: DEBUG nova.objects.instance [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Lazy-loading 'resources' on Instance uuid 6b1f0e69-3915-40dc-b4ec-93ab174f12b6 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1908.762283] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0edf2b3d-6f9a-490f-a6e9-afe1a9495e09 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "ca3d1a73-6f3b-4278-8fe7-03b66f407ba6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.953s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1908.948946] env[62684]: INFO nova.compute.manager [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Rescuing [ 1908.949283] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "refresh_cache-ca3d1a73-6f3b-4278-8fe7-03b66f407ba6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1908.949486] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquired lock "refresh_cache-ca3d1a73-6f3b-4278-8fe7-03b66f407ba6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1908.949698] env[62684]: DEBUG nova.network.neutron [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1909.125083] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052813, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.163905} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.125748] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1909.126674] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93136518-b252-4730-8a8f-018b729e48a1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.150231] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Reconfiguring VM instance instance-00000030 to attach disk [datastore2] 02dc8c41-5092-4f84-9722-37d4df3a459a/02dc8c41-5092-4f84-9722-37d4df3a459a.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1909.150525] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35896cd8-07d1-4f89-9476-d4b92ec738a7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.170496] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 1909.170496] env[62684]: value = "task-2052814" [ 1909.170496] env[62684]: _type = "Task" [ 1909.170496] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1909.179697] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052814, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1909.264165] env[62684]: DEBUG nova.compute.utils [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1909.266505] env[62684]: DEBUG nova.compute.manager [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Not allocating networking since 'none' was specified. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1909.269399] env[62684]: DEBUG nova.compute.manager [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1909.685989] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052814, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1909.720022] env[62684]: DEBUG nova.network.neutron [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Updating instance_info_cache with network_info: [{"id": "1ebe50b2-d6ab-48aa-b581-d2d09b588552", "address": "fa:16:3e:42:bf:40", "network": {"id": "bd253713-4e81-4c94-9689-22b81e7f51b6", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-307001665-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd812751722143fabedfa986a2d98b59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ebe50b2-d6", "ovs_interfaceid": "1ebe50b2-d6ab-48aa-b581-d2d09b588552", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1909.773152] env[62684]: DEBUG nova.compute.manager [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1909.798631] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1909.845374] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cc6143f-3460-4fbc-b403-042c5e2c7743 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.854742] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4fbf8d4-2012-4073-952b-d83500d2ae57 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.889924] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0806a62f-8b43-46a3-ac8a-7a3789b8ee5d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.901167] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa54942d-df7b-4d34-a889-614c0b3eb4bf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.916374] env[62684]: DEBUG nova.compute.provider_tree [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1910.093966] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Acquiring lock "dab11b88-ac23-43f0-9203-024faf41e1f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1910.094354] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Lock "dab11b88-ac23-43f0-9203-024faf41e1f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1910.183679] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052814, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.224462] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Releasing lock "refresh_cache-ca3d1a73-6f3b-4278-8fe7-03b66f407ba6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1910.420570] env[62684]: DEBUG nova.scheduler.client.report [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1910.682927] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052814, 'name': ReconfigVM_Task, 'duration_secs': 1.054031} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1910.684365] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Reconfigured VM instance instance-00000030 to attach disk [datastore2] 02dc8c41-5092-4f84-9722-37d4df3a459a/02dc8c41-5092-4f84-9722-37d4df3a459a.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1910.684365] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3a8786c7-e073-4fd5-ab99-93aef41aca3c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.691903] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 1910.691903] env[62684]: value = "task-2052815" [ 1910.691903] env[62684]: _type = "Task" [ 1910.691903] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.700710] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052815, 'name': Rename_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.766401] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1910.766587] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-26c0eaea-a9f7-47de-8c2c-a938daee6bee {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.773167] env[62684]: DEBUG oslo_vmware.api [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1910.773167] env[62684]: value = "task-2052816" [ 1910.773167] env[62684]: _type = "Task" [ 1910.773167] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.780819] env[62684]: DEBUG oslo_vmware.api [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052816, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.789131] env[62684]: DEBUG nova.compute.manager [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1910.811793] env[62684]: DEBUG nova.virt.hardware [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1910.811999] env[62684]: DEBUG nova.virt.hardware [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1910.812272] env[62684]: DEBUG nova.virt.hardware [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1910.812585] env[62684]: DEBUG nova.virt.hardware [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1910.812840] env[62684]: DEBUG nova.virt.hardware [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1910.813091] env[62684]: DEBUG nova.virt.hardware [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1910.813377] env[62684]: DEBUG nova.virt.hardware [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1910.813639] env[62684]: DEBUG nova.virt.hardware [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1910.813902] env[62684]: DEBUG nova.virt.hardware [None req-9a3e4884-3602-4336-b432-31445a240c94 
tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1910.814497] env[62684]: DEBUG nova.virt.hardware [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1910.814692] env[62684]: DEBUG nova.virt.hardware [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1910.815555] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ce6b15a-7c68-4f81-8c86-fcc68308c0ca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.823806] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c228b702-f12b-457d-8d02-0f3f13004afa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.840039] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Instance VIF info [] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1910.845332] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Creating folder: Project (2d944b7ab91a4e6c830f270a02afb129). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1910.845726] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d4e6f6e9-d3a9-4084-b7b0-a627ef0fe87f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.856257] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Created folder: Project (2d944b7ab91a4e6c830f270a02afb129) in parent group-v421118. [ 1910.857565] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Creating folder: Instances. Parent ref: group-v421263. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1910.857565] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bcee8ff2-39a1-4c8d-b411-c946c1850743 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.866451] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Created folder: Instances in parent group-v421263. 
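Aside: the folder records just above ("Creating folder: Project (...). Parent ref: group-v421118.", "Created folder: Instances in parent group-v421263.") reflect an idempotent create-if-missing step, the same pattern as the earlier "_create_folder_if_missing" entry. A rough sketch of that pattern follows; it is not Nova's vm_util.create_folder, and `list_children`/`create_folder`/`DuplicateName` are stand-ins for whatever vSphere binding is actually in use.

```python
# Illustrative sketch of the create-folder-if-missing step logged above; it is
# not Nova's vm_util.create_folder. `list_children(parent)` yields
# (name, ref) pairs and `create_folder(parent, name)` performs the actual
# CreateFolder call; DuplicateName models the "already exists" race.
class DuplicateName(Exception):
    """Another caller created the folder first."""


def ensure_folder(parent, name, list_children, create_folder):
    """Return the child folder `name` under `parent`, creating it if absent."""
    for child_name, ref in list_children(parent):
        if child_name == name:
            return ref                        # reuse the existing folder
    try:
        return create_folder(parent, name)    # e.g. the Folder.CreateFolder call above
    except DuplicateName:
        # Lost the race to a concurrent build; the folder now exists, find it.
        for child_name, ref in list_children(parent):
            if child_name == name:
                return ref
        raise
```

Checking first and then tolerating the duplicate-name race keeps the step safe to repeat, which matters when several tempest instances land in the same project folder at once.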
[ 1910.866755] env[62684]: DEBUG oslo.service.loopingcall [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1910.866983] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1910.868040] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cd755722-cf30-4fdf-8fb3-51963ff759d4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.884663] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1910.884663] env[62684]: value = "task-2052819" [ 1910.884663] env[62684]: _type = "Task" [ 1910.884663] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.894192] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052819, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.925615] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.165s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1910.929148] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.410s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1910.929777] env[62684]: DEBUG nova.objects.instance [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Lazy-loading 'resources' on Instance uuid 73f27fc0-ebae-41c7-b292-14396f79a5a2 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1910.954176] env[62684]: INFO nova.scheduler.client.report [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Deleted allocations for instance 6b1f0e69-3915-40dc-b4ec-93ab174f12b6 [ 1911.212985] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052815, 'name': Rename_Task, 'duration_secs': 0.242636} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.213438] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1911.213822] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2a0ed8f7-c894-450c-b06b-ccfe258ba7dc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.221601] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 1911.221601] env[62684]: value = "task-2052820" [ 1911.221601] env[62684]: _type = "Task" [ 1911.221601] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.237410] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052820, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.283057] env[62684]: DEBUG oslo_vmware.api [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052816, 'name': PowerOffVM_Task, 'duration_secs': 0.294278} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.283320] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1911.284144] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4e303ea-7c9e-4cbb-a558-61e6d0cec826 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.305615] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f91e7d-bd2b-4198-8e10-d19ca184721f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.340780] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1911.341096] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7fee1353-c556-489c-af6b-a70b6840a9af {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.348972] env[62684]: DEBUG oslo_vmware.api [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1911.348972] env[62684]: value = "task-2052821" [ 1911.348972] env[62684]: _type = "Task" [ 1911.348972] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.359248] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] VM already powered off {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1911.360579] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1911.360579] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1911.360579] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1911.360579] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1911.360579] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e4a493fd-59fe-4560-b091-ff85e611c9d3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.369520] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1911.369713] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1911.371510] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50e9fff9-b8cc-4f50-9a4d-a7dd925f0d11 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.379795] env[62684]: DEBUG oslo_vmware.api [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1911.379795] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]528cadb5-d7de-c322-5ac9-ef59ca060bc8" [ 1911.379795] env[62684]: _type = "Task" [ 1911.379795] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.387871] env[62684]: DEBUG oslo_vmware.api [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]528cadb5-d7de-c322-5ac9-ef59ca060bc8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.395774] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052819, 'name': CreateVM_Task, 'duration_secs': 0.351349} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.395943] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1911.396394] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1911.396559] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1911.396918] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1911.397191] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec53ba52-89d9-4bbe-ae5c-78ab9b00dc11 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.402574] env[62684]: DEBUG oslo_vmware.api [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Waiting for the task: (returnval){ [ 
1911.402574] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a9b1c8-b464-c692-1e28-674adee74056" [ 1911.402574] env[62684]: _type = "Task" [ 1911.402574] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.410336] env[62684]: DEBUG oslo_vmware.api [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a9b1c8-b464-c692-1e28-674adee74056, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.467532] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5a8008a9-5453-4500-93da-cdc922184b64 tempest-FloatingIPsAssociationTestJSON-557488177 tempest-FloatingIPsAssociationTestJSON-557488177-project-member] Lock "6b1f0e69-3915-40dc-b4ec-93ab174f12b6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.235s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1911.732441] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052820, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.877662] env[62684]: DEBUG oslo_concurrency.lockutils [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "interface-4e5152b0-7bac-4dc2-b6c7-6590fa2d5978-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1911.877912] env[62684]: DEBUG oslo_concurrency.lockutils [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "interface-4e5152b0-7bac-4dc2-b6c7-6590fa2d5978-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1911.878291] env[62684]: DEBUG nova.objects.instance [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lazy-loading 'flavor' on Instance uuid 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1911.889085] env[62684]: DEBUG oslo_vmware.api [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]528cadb5-d7de-c322-5ac9-ef59ca060bc8, 'name': SearchDatastore_Task, 'duration_secs': 0.01784} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.890137] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a78d3f2-e1ae-42da-aa22-c99e2c77643a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.898875] env[62684]: DEBUG oslo_vmware.api [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1911.898875] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5216a945-ea01-c077-e20a-e80d52b2caa3" [ 1911.898875] env[62684]: _type = "Task" [ 1911.898875] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.912046] env[62684]: DEBUG oslo_vmware.api [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5216a945-ea01-c077-e20a-e80d52b2caa3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.918133] env[62684]: DEBUG oslo_vmware.api [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a9b1c8-b464-c692-1e28-674adee74056, 'name': SearchDatastore_Task, 'duration_secs': 0.035989} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.918976] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1911.919267] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1911.919555] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1911.919726] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1911.919940] env[62684]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1911.920228] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-32575fa3-1b03-4bb5-886e-81f7b728b243 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.928681] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1911.928903] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1911.932472] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a8bbac4-8d2f-429f-a206-da128528e33c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.938763] env[62684]: DEBUG oslo_vmware.api [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Waiting for the task: (returnval){ [ 1911.938763] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521df569-1611-3dd0-c47a-73a5e22f0457" [ 1911.938763] env[62684]: _type = "Task" [ 1911.938763] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.946759] env[62684]: DEBUG oslo_vmware.api [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521df569-1611-3dd0-c47a-73a5e22f0457, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.961918] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d0953e4-b9dc-4573-8e7c-c77e2a5b9ecd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.970538] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e880b8-7195-41a6-b18b-65378838ec9b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.002514] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c9d8402-acf6-46cb-ad73-512de41c2378 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.010258] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-858ee1d6-1917-4a2d-9ce6-c73fda5951c3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.024278] env[62684]: DEBUG nova.compute.provider_tree [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1912.233887] env[62684]: DEBUG oslo_vmware.api [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052820, 'name': PowerOnVM_Task, 'duration_secs': 0.897057} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1912.234200] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1912.234409] env[62684]: INFO nova.compute.manager [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Took 10.52 seconds to spawn the instance on the hypervisor. 
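Most of the vCenter interaction in this stretch follows one pattern: the driver invokes a *_Task method (CreateVM_Task, PowerOnVM_Task, CopyVirtualDisk_Task, ...), gets back a task reference, and then polls it until it reaches a terminal state — hence the repeated "Task: {...} progress is N%" entries roughly every half second, driven by oslo.vmware's wait_for_task on top of oslo.service's FixedIntervalLoopingCall (both visible in the file/line references above). The sketch below is a generic, simplified version of that wait loop for illustration only; `fetch_task_info` is a hypothetical stand-in for the real vSphere TaskInfo lookup, not an oslo.vmware API.

```python
# Generic poll-until-done sketch of the task-wait pattern seen in this log.
# Illustrative only; oslo.vmware's real wait_for_task differs in detail.
import time


def wait_for_task(task_id, fetch_task_info, poll_interval=0.5, timeout=300):
    """Poll a task until it reaches a terminal state and return its result.

    fetch_task_info(task_id) is assumed to return something like
    {'state': 'running', 'progress': 25} or {'state': 'success', 'result': ...}.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info(task_id)
        # Mirrors the "Task: {...} progress is N%" debug lines in the log.
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
        time.sleep(poll_interval)  # ~0.5 s cadence, matching the timestamps above
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")
```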
[ 1912.234596] env[62684]: DEBUG nova.compute.manager [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1912.235398] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9858c355-cfc4-43cd-9637-808d20abcbc4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.409476] env[62684]: DEBUG oslo_vmware.api [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5216a945-ea01-c077-e20a-e80d52b2caa3, 'name': SearchDatastore_Task, 'duration_secs': 0.025968} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1912.410120] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1912.410396] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] ca3d1a73-6f3b-4278-8fe7-03b66f407ba6/3931321c-cb4c-4b87-8d3a-50e05ea01db2-rescue.vmdk. {{(pid=62684) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1912.410756] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-33c5e591-f6ab-4108-b96b-d44d012db8b6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.418612] env[62684]: DEBUG oslo_vmware.api [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1912.418612] env[62684]: value = "task-2052822" [ 1912.418612] env[62684]: _type = "Task" [ 1912.418612] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.428571] env[62684]: DEBUG oslo_vmware.api [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052822, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.448709] env[62684]: DEBUG oslo_vmware.api [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521df569-1611-3dd0-c47a-73a5e22f0457, 'name': SearchDatastore_Task, 'duration_secs': 0.017198} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1912.449602] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22dd7d2f-d730-465f-ac07-0ba4bbf46aa6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.458654] env[62684]: DEBUG oslo_vmware.api [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Waiting for the task: (returnval){ [ 1912.458654] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52536eaa-ec7c-d8c5-1655-e35ba997cf81" [ 1912.458654] env[62684]: _type = "Task" [ 1912.458654] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.466803] env[62684]: DEBUG oslo_vmware.api [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52536eaa-ec7c-d8c5-1655-e35ba997cf81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.499932] env[62684]: DEBUG nova.objects.instance [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lazy-loading 'pci_requests' on Instance uuid 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1912.530313] env[62684]: DEBUG nova.scheduler.client.report [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1912.757811] env[62684]: INFO nova.compute.manager [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Took 49.63 seconds to build instance. [ 1912.929734] env[62684]: DEBUG oslo_vmware.api [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052822, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.970960] env[62684]: DEBUG oslo_vmware.api [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52536eaa-ec7c-d8c5-1655-e35ba997cf81, 'name': SearchDatastore_Task, 'duration_secs': 0.024956} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1912.971513] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1912.971684] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 8449f09b-4e7b-4511-bb3c-2ff6667addb2/8449f09b-4e7b-4511-bb3c-2ff6667addb2.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1912.972156] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c06c304f-fedb-482b-8733-79252fc9f874 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.983470] env[62684]: DEBUG oslo_vmware.api [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Waiting for the task: (returnval){ [ 1912.983470] env[62684]: value = "task-2052823" [ 1912.983470] env[62684]: _type = "Task" [ 1912.983470] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.992978] env[62684]: DEBUG oslo_vmware.api [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052823, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1913.003766] env[62684]: DEBUG nova.objects.base [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Object Instance<4e5152b0-7bac-4dc2-b6c7-6590fa2d5978> lazy-loaded attributes: flavor,pci_requests {{(pid=62684) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1913.003766] env[62684]: DEBUG nova.network.neutron [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1913.036214] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.107s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1913.041143] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.330s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1913.041827] env[62684]: INFO nova.compute.claims [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1913.069909] env[62684]: DEBUG nova.policy [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e957449ae9d24bdaba38b3db704d3d61', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5cb4900a999e467bafdfd1fb407a82f4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1913.075347] env[62684]: INFO nova.scheduler.client.report [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Deleted allocations for instance 73f27fc0-ebae-41c7-b292-14396f79a5a2 [ 1913.259713] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4c5f80c9-bbfe-4650-8341-bcd91760b726 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "02dc8c41-5092-4f84-9722-37d4df3a459a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.901s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1913.407966] env[62684]: DEBUG 
nova.network.neutron [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Successfully created port: 57b8942c-9a9b-4d95-bc8c-f4367c62ba17 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1913.431667] env[62684]: DEBUG oslo_vmware.api [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052822, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.680524} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1913.431961] env[62684]: INFO nova.virt.vmwareapi.ds_util [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] ca3d1a73-6f3b-4278-8fe7-03b66f407ba6/3931321c-cb4c-4b87-8d3a-50e05ea01db2-rescue.vmdk. [ 1913.432763] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca7840d3-ddcd-47ff-af77-0d2d5927c6ed {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.468663] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] ca3d1a73-6f3b-4278-8fe7-03b66f407ba6/3931321c-cb4c-4b87-8d3a-50e05ea01db2-rescue.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1913.469061] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73419c3f-e783-41cd-be87-b77e3e7ef206 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.490113] env[62684]: DEBUG oslo_vmware.api [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1913.490113] env[62684]: value = "task-2052824" [ 1913.490113] env[62684]: _type = "Task" [ 1913.490113] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1913.496784] env[62684]: DEBUG oslo_vmware.api [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052823, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1913.500209] env[62684]: DEBUG oslo_vmware.api [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052824, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1913.584656] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9decd35f-e52e-4437-8c90-2d11076e90af tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Lock "73f27fc0-ebae-41c7-b292-14396f79a5a2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.326s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1913.762453] env[62684]: DEBUG nova.compute.manager [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1913.997066] env[62684]: DEBUG oslo_vmware.api [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052823, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.812549} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1913.999511] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 8449f09b-4e7b-4511-bb3c-2ff6667addb2/8449f09b-4e7b-4511-bb3c-2ff6667addb2.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1913.999891] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1914.000657] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9284dc9c-4e32-4ff2-80ae-99db5fd71e2d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.009634] env[62684]: DEBUG oslo_vmware.api [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052824, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.011757] env[62684]: DEBUG oslo_vmware.api [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Waiting for the task: (returnval){ [ 1914.011757] env[62684]: value = "task-2052825" [ 1914.011757] env[62684]: _type = "Task" [ 1914.011757] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.020849] env[62684]: DEBUG oslo_vmware.api [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052825, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.113693] env[62684]: DEBUG oslo_vmware.rw_handles [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5261f706-bba3-259e-65d3-c65c0d7be25a/disk-0.vmdk. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1914.114595] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa412078-80ef-421a-acb9-daadd7e79913 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.123522] env[62684]: DEBUG oslo_vmware.rw_handles [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5261f706-bba3-259e-65d3-c65c0d7be25a/disk-0.vmdk is in state: ready. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1914.123721] env[62684]: ERROR oslo_vmware.rw_handles [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5261f706-bba3-259e-65d3-c65c0d7be25a/disk-0.vmdk due to incomplete transfer. [ 1914.123979] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-0b769ebd-bb70-4e7c-80d1-5155566d43bf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.131118] env[62684]: DEBUG oslo_vmware.rw_handles [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5261f706-bba3-259e-65d3-c65c0d7be25a/disk-0.vmdk. 
{{(pid=62684) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1914.131352] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Uploaded image 2116318e-3b46-4c4b-83f6-3ab3a26c5100 to the Glance image server {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1914.133526] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Destroying the VM {{(pid=62684) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1914.136582] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-000792ec-d09b-4de1-9b5c-1d8eb8f98233 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.142889] env[62684]: DEBUG oslo_vmware.api [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 1914.142889] env[62684]: value = "task-2052826" [ 1914.142889] env[62684]: _type = "Task" [ 1914.142889] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.153726] env[62684]: DEBUG oslo_vmware.api [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2052826, 'name': Destroy_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.283813] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1914.509013] env[62684]: DEBUG oslo_vmware.api [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052824, 'name': ReconfigVM_Task, 'duration_secs': 0.604953} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1914.509351] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Reconfigured VM instance instance-0000002f to attach disk [datastore2] ca3d1a73-6f3b-4278-8fe7-03b66f407ba6/3931321c-cb4c-4b87-8d3a-50e05ea01db2-rescue.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1914.510246] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf13daf2-1c30-45b7-9fb3-f44d73172ceb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.523235] env[62684]: DEBUG oslo_vmware.api [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052825, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068296} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1914.539993] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1914.548918] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798c2fd5-2ef1-4d2a-b5cf-ca5f94bbd300 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.551588] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a05af4de-4855-44e9-a421-bea4d74bdb83 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.581301] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] 8449f09b-4e7b-4511-bb3c-2ff6667addb2/8449f09b-4e7b-4511-bb3c-2ff6667addb2.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1914.584233] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-affe3713-ee29-4093-9b33-2ec206e4373b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.600018] env[62684]: DEBUG oslo_vmware.api [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1914.600018] env[62684]: value = "task-2052827" [ 1914.600018] env[62684]: _type = "Task" [ 1914.600018] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.600812] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee9a6dd8-ac5b-41af-90c6-52a077c7656a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.609008] env[62684]: DEBUG oslo_vmware.api [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Waiting for the task: (returnval){ [ 1914.609008] env[62684]: value = "task-2052828" [ 1914.609008] env[62684]: _type = "Task" [ 1914.609008] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.613941] env[62684]: DEBUG oslo_vmware.api [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052827, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.617627] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76b32dc-d8a3-4de7-b07b-0e7f0d41cd84 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.626245] env[62684]: DEBUG oslo_vmware.api [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052828, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.653533] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4f61a1d-23db-4005-93a7-cd4c0c316959 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.661595] env[62684]: DEBUG oslo_vmware.api [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2052826, 'name': Destroy_Task} progress is 33%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.665131] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1744a3d3-2a6e-4c9a-b6a1-47089c5d8f6d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.678823] env[62684]: DEBUG nova.compute.provider_tree [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1914.966392] env[62684]: DEBUG nova.network.neutron [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Successfully updated port: 57b8942c-9a9b-4d95-bc8c-f4367c62ba17 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1915.115339] env[62684]: DEBUG oslo_vmware.api [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052827, 'name': ReconfigVM_Task, 'duration_secs': 0.153991} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1915.118669] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1915.119659] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4022653f-4e76-48ef-897b-fc76170a1ede {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.126654] env[62684]: DEBUG oslo_vmware.api [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052828, 'name': ReconfigVM_Task, 'duration_secs': 0.369781} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1915.127961] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Reconfigured VM instance instance-00000031 to attach disk [datastore1] 8449f09b-4e7b-4511-bb3c-2ff6667addb2/8449f09b-4e7b-4511-bb3c-2ff6667addb2.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1915.128700] env[62684]: DEBUG oslo_vmware.api [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1915.128700] env[62684]: value = "task-2052829" [ 1915.128700] env[62684]: _type = "Task" [ 1915.128700] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.129010] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e8192d05-04f3-47ba-9c0e-d40aa9c9f2ac {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.139802] env[62684]: DEBUG oslo_vmware.api [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052829, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.141133] env[62684]: DEBUG oslo_vmware.api [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Waiting for the task: (returnval){ [ 1915.141133] env[62684]: value = "task-2052830" [ 1915.141133] env[62684]: _type = "Task" [ 1915.141133] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.150031] env[62684]: DEBUG oslo_vmware.api [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052830, 'name': Rename_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.158282] env[62684]: DEBUG oslo_vmware.api [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2052826, 'name': Destroy_Task, 'duration_secs': 0.756072} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1915.158552] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Destroyed the VM [ 1915.158798] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Deleting Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1915.159073] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-47c01652-fc3b-4516-a70e-490c0d2bdf39 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.167164] env[62684]: DEBUG oslo_vmware.api [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 1915.167164] env[62684]: value = "task-2052831" [ 1915.167164] env[62684]: _type = "Task" [ 1915.167164] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.184821] env[62684]: DEBUG nova.scheduler.client.report [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1915.193033] env[62684]: DEBUG oslo_vmware.api [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2052831, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.471017] env[62684]: DEBUG oslo_concurrency.lockutils [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "refresh_cache-4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1915.471017] env[62684]: DEBUG oslo_concurrency.lockutils [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "refresh_cache-4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1915.471017] env[62684]: DEBUG nova.network.neutron [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1915.643443] env[62684]: DEBUG oslo_vmware.api [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052829, 'name': PowerOnVM_Task, 'duration_secs': 0.427659} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1915.648307] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1915.656931] env[62684]: DEBUG oslo_vmware.api [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052830, 'name': Rename_Task, 'duration_secs': 0.208535} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1915.659283] env[62684]: DEBUG nova.compute.manager [None req-4fe9a57c-3139-48d7-a264-e6b9b7a24279 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1915.660171] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1915.661804] env[62684]: DEBUG nova.compute.manager [req-f88c13de-a93f-47be-835a-accc68b99cc2 req-126535c3-8595-4dd6-b9bb-f219cf348fed service nova] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Received event network-vif-plugged-57b8942c-9a9b-4d95-bc8c-f4367c62ba17 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1915.662052] env[62684]: DEBUG oslo_concurrency.lockutils [req-f88c13de-a93f-47be-835a-accc68b99cc2 req-126535c3-8595-4dd6-b9bb-f219cf348fed service nova] Acquiring lock "4e5152b0-7bac-4dc2-b6c7-6590fa2d5978-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1915.662283] env[62684]: DEBUG oslo_concurrency.lockutils [req-f88c13de-a93f-47be-835a-accc68b99cc2 req-126535c3-8595-4dd6-b9bb-f219cf348fed service nova] Lock "4e5152b0-7bac-4dc2-b6c7-6590fa2d5978-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1915.662464] env[62684]: DEBUG oslo_concurrency.lockutils [req-f88c13de-a93f-47be-835a-accc68b99cc2 req-126535c3-8595-4dd6-b9bb-f219cf348fed service nova] Lock "4e5152b0-7bac-4dc2-b6c7-6590fa2d5978-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.663020] env[62684]: DEBUG nova.compute.manager [req-f88c13de-a93f-47be-835a-accc68b99cc2 req-126535c3-8595-4dd6-b9bb-f219cf348fed service nova] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] No waiting events found dispatching network-vif-plugged-57b8942c-9a9b-4d95-bc8c-f4367c62ba17 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1915.663275] env[62684]: WARNING nova.compute.manager [req-f88c13de-a93f-47be-835a-accc68b99cc2 req-126535c3-8595-4dd6-b9bb-f219cf348fed service nova] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Received unexpected event network-vif-plugged-57b8942c-9a9b-4d95-bc8c-f4367c62ba17 for instance with vm_state active and task_state None. 
[ 1915.664671] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae8d6b55-9485-463b-b9dd-967474be06d9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.668444] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9adb890a-17e2-4b9e-9955-98cb22145f37 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.690025] env[62684]: DEBUG oslo_vmware.api [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2052831, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.690025] env[62684]: DEBUG oslo_vmware.api [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Waiting for the task: (returnval){ [ 1915.690025] env[62684]: value = "task-2052832" [ 1915.690025] env[62684]: _type = "Task" [ 1915.690025] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.692953] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.653s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.694035] env[62684]: DEBUG nova.compute.manager [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1915.697346] env[62684]: DEBUG oslo_concurrency.lockutils [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.939s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1915.698032] env[62684]: DEBUG nova.objects.instance [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lazy-loading 'resources' on Instance uuid cfe219da-adf9-44b9-9df3-752ccf72a68b {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1915.704502] env[62684]: DEBUG oslo_vmware.api [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052832, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.035117] env[62684]: WARNING nova.network.neutron [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] bbb78a3c-6804-4aae-9107-4ae6699c305d already exists in list: networks containing: ['bbb78a3c-6804-4aae-9107-4ae6699c305d']. ignoring it [ 1916.185554] env[62684]: DEBUG oslo_vmware.api [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2052831, 'name': RemoveSnapshot_Task, 'duration_secs': 0.848518} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.185717] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Deleted Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1916.186058] env[62684]: DEBUG nova.compute.manager [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1916.186959] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98bd344f-3e9c-4cf3-86a4-e7a4476cf508 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.204792] env[62684]: DEBUG nova.compute.utils [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1916.212019] env[62684]: DEBUG oslo_vmware.api [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052832, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.218928] env[62684]: DEBUG nova.compute.manager [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1916.219226] env[62684]: DEBUG nova.network.neutron [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1916.226438] env[62684]: DEBUG nova.compute.manager [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Stashing vm_state: active {{(pid=62684) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1916.289695] env[62684]: DEBUG nova.policy [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0c9327f7394249948899bf76e1837d36', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7855def9d0aa49abb7003ee504b9ccaf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1916.501864] env[62684]: DEBUG nova.network.neutron [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Updating instance_info_cache with network_info: [{"id": "220c5589-f035-4097-8c0d-dfd565a9203a", "address": "fa:16:3e:3e:7c:34", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.235", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap220c5589-f0", "ovs_interfaceid": "220c5589-f035-4097-8c0d-dfd565a9203a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "57b8942c-9a9b-4d95-bc8c-f4367c62ba17", "address": "fa:16:3e:1f:5a:45", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57b8942c-9a", "ovs_interfaceid": "57b8942c-9a9b-4d95-bc8c-f4367c62ba17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1916.703392] env[62684]: DEBUG oslo_vmware.api [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052832, 'name': PowerOnVM_Task, 'duration_secs': 0.844117} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.703674] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1916.704042] env[62684]: INFO nova.compute.manager [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Took 5.91 seconds to spawn the instance on the hypervisor. [ 1916.704303] env[62684]: DEBUG nova.compute.manager [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1916.705217] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3887303e-e73c-47f1-943e-37e3b3c839e9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.721473] env[62684]: DEBUG nova.compute.manager [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1916.727469] env[62684]: INFO nova.compute.manager [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Shelve offloading [ 1916.729790] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1916.729790] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-49204690-314b-4816-9805-b8102fcffaea {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.736485] env[62684]: DEBUG oslo_vmware.api [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 1916.736485] env[62684]: value = "task-2052833" [ 1916.736485] env[62684]: _type = "Task" [ 1916.736485] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.748064] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] VM already powered off {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1916.748346] env[62684]: DEBUG nova.compute.manager [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1916.749389] env[62684]: DEBUG oslo_concurrency.lockutils [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1916.749933] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fdeed2c-68d0-458b-9bb9-adf072d05be7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.759941] env[62684]: DEBUG oslo_concurrency.lockutils [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "refresh_cache-b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1916.760163] env[62684]: DEBUG oslo_concurrency.lockutils [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquired lock "refresh_cache-b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" {{(pid=62684) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1916.761059] env[62684]: DEBUG nova.network.neutron [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1916.779799] env[62684]: DEBUG nova.network.neutron [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Successfully created port: 9e7719ad-6572-41df-9951-9dc91c818b24 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1916.815084] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08959bfc-4266-46f7-bbf0-52ff9a192bbe {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.821491] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd457e8b-00f2-42a9-9f54-9e255741e4ce {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.856589] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b816587d-204a-477c-8173-f0da64893bc2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.864413] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1718e969-6741-4002-8033-7f9bb239c858 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.878899] env[62684]: DEBUG nova.compute.provider_tree [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1917.012522] env[62684]: DEBUG oslo_concurrency.lockutils [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "refresh_cache-4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1917.012522] env[62684]: DEBUG oslo_concurrency.lockutils [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1917.012522] env[62684]: DEBUG oslo_concurrency.lockutils [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1917.012522] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-62fb341b-0ca6-4550-8aff-e4264fd9513f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.029826] env[62684]: DEBUG nova.virt.hardware [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1917.029965] env[62684]: DEBUG nova.virt.hardware [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1917.030148] env[62684]: DEBUG nova.virt.hardware [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1917.030345] env[62684]: DEBUG nova.virt.hardware [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1917.030498] env[62684]: DEBUG nova.virt.hardware [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1917.030649] env[62684]: DEBUG nova.virt.hardware [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1917.030860] env[62684]: DEBUG nova.virt.hardware [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1917.031067] env[62684]: DEBUG nova.virt.hardware [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1917.031265] env[62684]: DEBUG nova.virt.hardware [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 
tempest-AttachInterfacesTestJSON-207820228-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1917.031440] env[62684]: DEBUG nova.virt.hardware [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1917.031623] env[62684]: DEBUG nova.virt.hardware [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1917.040723] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Reconfiguring VM to attach interface {{(pid=62684) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1917.040723] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7956a057-b698-4612-b251-f41b15507dd0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.059019] env[62684]: DEBUG oslo_vmware.api [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 1917.059019] env[62684]: value = "task-2052834" [ 1917.059019] env[62684]: _type = "Task" [ 1917.059019] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.072108] env[62684]: DEBUG oslo_vmware.api [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052834, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.230845] env[62684]: INFO nova.compute.manager [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Took 46.95 seconds to build instance. 
[ 1917.382049] env[62684]: DEBUG nova.scheduler.client.report [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1917.492609] env[62684]: DEBUG nova.network.neutron [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Updating instance_info_cache with network_info: [{"id": "8be48385-37eb-4c2e-baf8-404a9aad87de", "address": "fa:16:3e:33:7c:80", "network": {"id": "64494ea7-f6d9-430c-8ac7-e876e763004b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2056829508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e57b232a7e7647c7a3b2bca3c096feb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8be48385-37", "ovs_interfaceid": "8be48385-37eb-4c2e-baf8-404a9aad87de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1917.570745] env[62684]: DEBUG oslo_vmware.api [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052834, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.733596] env[62684]: DEBUG nova.compute.manager [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1917.736415] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9a3e4884-3602-4336-b432-31445a240c94 tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Lock "8449f09b-4e7b-4511-bb3c-2ff6667addb2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.375s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.770986] env[62684]: DEBUG nova.virt.hardware [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1917.771230] env[62684]: DEBUG nova.virt.hardware [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1917.771401] env[62684]: DEBUG nova.virt.hardware [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1917.771591] env[62684]: DEBUG nova.virt.hardware [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1917.771804] env[62684]: DEBUG nova.virt.hardware [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1917.771891] env[62684]: DEBUG nova.virt.hardware [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1917.772192] env[62684]: DEBUG nova.virt.hardware [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1917.772367] 
env[62684]: DEBUG nova.virt.hardware [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1917.772544] env[62684]: DEBUG nova.virt.hardware [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1917.772716] env[62684]: DEBUG nova.virt.hardware [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1917.772897] env[62684]: DEBUG nova.virt.hardware [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1917.773834] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e744048c-1ac1-4e41-8214-a5c4417138a5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.784141] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac49f638-5ec4-4386-aa59-ee229f48ef61 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.888132] env[62684]: DEBUG oslo_concurrency.lockutils [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.191s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.891387] env[62684]: DEBUG oslo_concurrency.lockutils [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.991s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1917.891647] env[62684]: DEBUG nova.objects.instance [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Lazy-loading 'resources' on Instance uuid d532b5fa-90a3-4f25-8684-4eabaa432c86 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1917.919860] env[62684]: INFO nova.scheduler.client.report [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Deleted allocations for instance cfe219da-adf9-44b9-9df3-752ccf72a68b [ 1917.995782] env[62684]: DEBUG oslo_concurrency.lockutils [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] 
Releasing lock "refresh_cache-b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1918.069994] env[62684]: DEBUG oslo_vmware.api [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052834, 'name': ReconfigVM_Task, 'duration_secs': 0.830174} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.069994] env[62684]: DEBUG oslo_concurrency.lockutils [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1918.069994] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Reconfigured VM to attach interface {{(pid=62684) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1918.238615] env[62684]: DEBUG nova.compute.manager [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1918.369357] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1918.370460] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-730ddb42-7b8c-4854-ab70-d6b4683b69d5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.379298] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1918.379746] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0bc3fb1f-127b-4ba9-b3f9-ab21bef9fbff {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.431847] env[62684]: DEBUG oslo_concurrency.lockutils [None req-75edc3f3-b696-46f7-8090-95819be5fa8c tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "cfe219da-adf9-44b9-9df3-752ccf72a68b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.867s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1918.511391] env[62684]: DEBUG nova.network.neutron [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 
tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Successfully updated port: 9e7719ad-6572-41df-9951-9dc91c818b24 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1918.570308] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1918.570579] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1918.570773] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Deleting the datastore file [datastore1] b4cd871a-30ea-4b7a-98ad-00b8676dc2cd {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1918.571059] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0be25482-7ee8-4327-878e-aacbdd7c793b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.574519] env[62684]: DEBUG oslo_concurrency.lockutils [None req-887f4639-845a-4ecf-bac2-f81235610243 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "interface-4e5152b0-7bac-4dc2-b6c7-6590fa2d5978-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.697s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1918.580425] env[62684]: DEBUG oslo_vmware.api [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 1918.580425] env[62684]: value = "task-2052836" [ 1918.580425] env[62684]: _type = "Task" [ 1918.580425] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.594022] env[62684]: DEBUG oslo_vmware.api [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2052836, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.761952] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1918.835401] env[62684]: DEBUG nova.compute.manager [req-c0c1558c-025b-4fac-b0ce-da2f34968055 req-f897a234-b0b3-4b6c-9011-b555397d73e6 service nova] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Received event network-changed-57b8942c-9a9b-4d95-bc8c-f4367c62ba17 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1918.835633] env[62684]: DEBUG nova.compute.manager [req-c0c1558c-025b-4fac-b0ce-da2f34968055 req-f897a234-b0b3-4b6c-9011-b555397d73e6 service nova] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Refreshing instance network info cache due to event network-changed-57b8942c-9a9b-4d95-bc8c-f4367c62ba17. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1918.835860] env[62684]: DEBUG oslo_concurrency.lockutils [req-c0c1558c-025b-4fac-b0ce-da2f34968055 req-f897a234-b0b3-4b6c-9011-b555397d73e6 service nova] Acquiring lock "refresh_cache-4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1918.836080] env[62684]: DEBUG oslo_concurrency.lockutils [req-c0c1558c-025b-4fac-b0ce-da2f34968055 req-f897a234-b0b3-4b6c-9011-b555397d73e6 service nova] Acquired lock "refresh_cache-4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1918.836282] env[62684]: DEBUG nova.network.neutron [req-c0c1558c-025b-4fac-b0ce-da2f34968055 req-f897a234-b0b3-4b6c-9011-b555397d73e6 service nova] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Refreshing network info cache for port 57b8942c-9a9b-4d95-bc8c-f4367c62ba17 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1918.867139] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd151e5b-6ef6-4eec-b266-f5c903bc4117 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.875640] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-986372c8-d15d-4a6a-8386-8e6f12d689a8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.910442] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d175f26-150f-4236-93f3-620c7e24d703 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.919864] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40048af7-1731-4775-aa54-518b034d3918 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.935280] env[62684]: DEBUG nova.compute.provider_tree [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 
tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1918.977827] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Acquiring lock "2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1918.978057] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Lock "2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.017275] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "refresh_cache-274d214a-4b92-4900-a66c-54baea2a68f8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1919.017427] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquired lock "refresh_cache-274d214a-4b92-4900-a66c-54baea2a68f8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1919.017593] env[62684]: DEBUG nova.network.neutron [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1919.093563] env[62684]: DEBUG oslo_vmware.api [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2052836, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.410019} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.094111] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1919.094264] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1919.094462] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1919.115685] env[62684]: INFO nova.scheduler.client.report [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Deleted allocations for instance b4cd871a-30ea-4b7a-98ad-00b8676dc2cd [ 1919.244772] env[62684]: INFO nova.compute.manager [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Rebuilding instance [ 1919.289563] env[62684]: DEBUG nova.compute.manager [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1919.290788] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58cf3df0-4070-4890-9bdf-89ecc0bba538 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.334688] env[62684]: DEBUG nova.compute.manager [req-c68f0c69-b747-454c-8f0c-94ac83e5cf68 req-5ea5ec53-04da-416a-97ab-8ddfce97751b service nova] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Received event network-vif-plugged-9e7719ad-6572-41df-9951-9dc91c818b24 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1919.334789] env[62684]: DEBUG oslo_concurrency.lockutils [req-c68f0c69-b747-454c-8f0c-94ac83e5cf68 req-5ea5ec53-04da-416a-97ab-8ddfce97751b service nova] Acquiring lock "274d214a-4b92-4900-a66c-54baea2a68f8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.334993] env[62684]: DEBUG oslo_concurrency.lockutils [req-c68f0c69-b747-454c-8f0c-94ac83e5cf68 req-5ea5ec53-04da-416a-97ab-8ddfce97751b service nova] Lock "274d214a-4b92-4900-a66c-54baea2a68f8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.335806] env[62684]: DEBUG 
oslo_concurrency.lockutils [req-c68f0c69-b747-454c-8f0c-94ac83e5cf68 req-5ea5ec53-04da-416a-97ab-8ddfce97751b service nova] Lock "274d214a-4b92-4900-a66c-54baea2a68f8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.335806] env[62684]: DEBUG nova.compute.manager [req-c68f0c69-b747-454c-8f0c-94ac83e5cf68 req-5ea5ec53-04da-416a-97ab-8ddfce97751b service nova] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] No waiting events found dispatching network-vif-plugged-9e7719ad-6572-41df-9951-9dc91c818b24 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1919.335806] env[62684]: WARNING nova.compute.manager [req-c68f0c69-b747-454c-8f0c-94ac83e5cf68 req-5ea5ec53-04da-416a-97ab-8ddfce97751b service nova] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Received unexpected event network-vif-plugged-9e7719ad-6572-41df-9951-9dc91c818b24 for instance with vm_state building and task_state spawning. [ 1919.440505] env[62684]: DEBUG nova.scheduler.client.report [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1919.554541] env[62684]: DEBUG nova.network.neutron [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1919.620580] env[62684]: DEBUG oslo_concurrency.lockutils [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.691144] env[62684]: DEBUG nova.network.neutron [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Updating instance_info_cache with network_info: [{"id": "9e7719ad-6572-41df-9951-9dc91c818b24", "address": "fa:16:3e:b8:67:f7", "network": {"id": "2fa98fa4-ff7c-44e6-add0-693f55fd4b03", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2019954029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7855def9d0aa49abb7003ee504b9ccaf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e7719ad-65", "ovs_interfaceid": "9e7719ad-6572-41df-9951-9dc91c818b24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1919.789252] env[62684]: DEBUG nova.network.neutron [req-c0c1558c-025b-4fac-b0ce-da2f34968055 req-f897a234-b0b3-4b6c-9011-b555397d73e6 service nova] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Updated VIF entry in instance network info cache for port 57b8942c-9a9b-4d95-bc8c-f4367c62ba17. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1919.789694] env[62684]: DEBUG nova.network.neutron [req-c0c1558c-025b-4fac-b0ce-da2f34968055 req-f897a234-b0b3-4b6c-9011-b555397d73e6 service nova] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Updating instance_info_cache with network_info: [{"id": "220c5589-f035-4097-8c0d-dfd565a9203a", "address": "fa:16:3e:3e:7c:34", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.235", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap220c5589-f0", "ovs_interfaceid": "220c5589-f035-4097-8c0d-dfd565a9203a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "57b8942c-9a9b-4d95-bc8c-f4367c62ba17", "address": "fa:16:3e:1f:5a:45", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57b8942c-9a", "ovs_interfaceid": "57b8942c-9a9b-4d95-bc8c-f4367c62ba17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1919.806214] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1919.806214] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aae14b2b-8f45-4e15-8fb0-8a80ec16f720 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.814334] env[62684]: DEBUG oslo_vmware.api [None 
req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Waiting for the task: (returnval){ [ 1919.814334] env[62684]: value = "task-2052837" [ 1919.814334] env[62684]: _type = "Task" [ 1919.814334] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.822559] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052837, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.944203] env[62684]: DEBUG oslo_concurrency.lockutils [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.053s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.946618] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.517s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.951634] env[62684]: INFO nova.compute.claims [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1919.967210] env[62684]: INFO nova.scheduler.client.report [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Deleted allocations for instance d532b5fa-90a3-4f25-8684-4eabaa432c86 [ 1920.194117] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Releasing lock "refresh_cache-274d214a-4b92-4900-a66c-54baea2a68f8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1920.194512] env[62684]: DEBUG nova.compute.manager [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Instance network_info: |[{"id": "9e7719ad-6572-41df-9951-9dc91c818b24", "address": "fa:16:3e:b8:67:f7", "network": {"id": "2fa98fa4-ff7c-44e6-add0-693f55fd4b03", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2019954029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7855def9d0aa49abb7003ee504b9ccaf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e7719ad-65", "ovs_interfaceid": "9e7719ad-6572-41df-9951-9dc91c818b24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1920.194937] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:67:f7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e7719ad-6572-41df-9951-9dc91c818b24', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1920.202700] env[62684]: DEBUG oslo.service.loopingcall [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1920.202939] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1920.203199] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-27184392-90fd-4230-af54-19ee178f8348 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.228521] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1920.228521] env[62684]: value = "task-2052838" [ 1920.228521] env[62684]: _type = "Task" [ 1920.228521] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1920.238650] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052838, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.293195] env[62684]: DEBUG oslo_concurrency.lockutils [req-c0c1558c-025b-4fac-b0ce-da2f34968055 req-f897a234-b0b3-4b6c-9011-b555397d73e6 service nova] Releasing lock "refresh_cache-4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1920.323970] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052837, 'name': PowerOffVM_Task, 'duration_secs': 0.212362} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.324336] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1920.324571] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1920.325412] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d7514ca-ba09-4af8-8e29-99ce49894482 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.333683] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1920.334121] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6b3056ea-b8c7-48ba-8850-5326556ab107 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.352444] env[62684]: INFO nova.compute.manager [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Rescuing [ 1920.352780] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "refresh_cache-b1f70e39-bf37-4fb8-b95b-653b59bec265" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1920.353018] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquired lock "refresh_cache-b1f70e39-bf37-4fb8-b95b-653b59bec265" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1920.353290] env[62684]: DEBUG nova.network.neutron [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1920.358020] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1920.358020] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None 
req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1920.358020] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Deleting the datastore file [datastore1] 8449f09b-4e7b-4511-bb3c-2ff6667addb2 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1920.358020] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-68b4d12c-6cee-4859-b873-029abb8eec7a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.364688] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Waiting for the task: (returnval){ [ 1920.364688] env[62684]: value = "task-2052840" [ 1920.364688] env[62684]: _type = "Task" [ 1920.364688] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1920.375366] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052840, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.477397] env[62684]: DEBUG oslo_concurrency.lockutils [None req-272e1511-ea93-49d8-bf6f-d53c8f6e3349 tempest-VolumesAssistedSnapshotsTest-1821509474 tempest-VolumesAssistedSnapshotsTest-1821509474-project-member] Lock "d532b5fa-90a3-4f25-8684-4eabaa432c86" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.984s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.740082] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052838, 'name': CreateVM_Task, 'duration_secs': 0.378107} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.740082] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1920.740451] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1920.740451] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1920.740647] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1920.740898] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46ccf7f3-24a1-476e-8748-7acfda9baf74 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.747597] env[62684]: DEBUG oslo_vmware.api [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1920.747597] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52162562-a258-e358-58e4-715c63f16c49" [ 1920.747597] env[62684]: _type = "Task" [ 1920.747597] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1920.755010] env[62684]: DEBUG oslo_vmware.api [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52162562-a258-e358-58e4-715c63f16c49, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.877142] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052840, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091581} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.877412] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1920.877605] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1920.877790] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1921.095577] env[62684]: DEBUG nova.compute.manager [req-7fe712c5-c06a-41a6-a79b-01ff53ff388c req-5fd9aea8-3680-4917-9962-38796aa54f16 service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Received event network-vif-unplugged-8be48385-37eb-4c2e-baf8-404a9aad87de {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1921.095815] env[62684]: DEBUG oslo_concurrency.lockutils [req-7fe712c5-c06a-41a6-a79b-01ff53ff388c req-5fd9aea8-3680-4917-9962-38796aa54f16 service nova] Acquiring lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1921.096074] env[62684]: DEBUG oslo_concurrency.lockutils [req-7fe712c5-c06a-41a6-a79b-01ff53ff388c req-5fd9aea8-3680-4917-9962-38796aa54f16 service nova] Lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1921.096262] env[62684]: DEBUG oslo_concurrency.lockutils [req-7fe712c5-c06a-41a6-a79b-01ff53ff388c req-5fd9aea8-3680-4917-9962-38796aa54f16 service nova] Lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1921.096437] env[62684]: DEBUG nova.compute.manager [req-7fe712c5-c06a-41a6-a79b-01ff53ff388c req-5fd9aea8-3680-4917-9962-38796aa54f16 service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] No waiting events found dispatching network-vif-unplugged-8be48385-37eb-4c2e-baf8-404a9aad87de {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1921.096609] env[62684]: WARNING nova.compute.manager [req-7fe712c5-c06a-41a6-a79b-01ff53ff388c req-5fd9aea8-3680-4917-9962-38796aa54f16 service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Received unexpected event network-vif-unplugged-8be48385-37eb-4c2e-baf8-404a9aad87de for instance with vm_state shelved_offloaded and task_state unshelving. 
[ 1921.096773] env[62684]: DEBUG nova.compute.manager [req-7fe712c5-c06a-41a6-a79b-01ff53ff388c req-5fd9aea8-3680-4917-9962-38796aa54f16 service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Received event network-changed-8be48385-37eb-4c2e-baf8-404a9aad87de {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1921.096938] env[62684]: DEBUG nova.compute.manager [req-7fe712c5-c06a-41a6-a79b-01ff53ff388c req-5fd9aea8-3680-4917-9962-38796aa54f16 service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Refreshing instance network info cache due to event network-changed-8be48385-37eb-4c2e-baf8-404a9aad87de. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1921.097179] env[62684]: DEBUG oslo_concurrency.lockutils [req-7fe712c5-c06a-41a6-a79b-01ff53ff388c req-5fd9aea8-3680-4917-9962-38796aa54f16 service nova] Acquiring lock "refresh_cache-b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1921.097255] env[62684]: DEBUG oslo_concurrency.lockutils [req-7fe712c5-c06a-41a6-a79b-01ff53ff388c req-5fd9aea8-3680-4917-9962-38796aa54f16 service nova] Acquired lock "refresh_cache-b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1921.097925] env[62684]: DEBUG nova.network.neutron [req-7fe712c5-c06a-41a6-a79b-01ff53ff388c req-5fd9aea8-3680-4917-9962-38796aa54f16 service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Refreshing network info cache for port 8be48385-37eb-4c2e-baf8-404a9aad87de {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1921.240285] env[62684]: DEBUG nova.network.neutron [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Updating instance_info_cache with network_info: [{"id": "35a0f9ef-b68c-43df-8887-6c35257bbc58", "address": "fa:16:3e:7a:c4:6c", "network": {"id": "bd253713-4e81-4c94-9689-22b81e7f51b6", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-307001665-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd812751722143fabedfa986a2d98b59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35a0f9ef-b6", "ovs_interfaceid": "35a0f9ef-b68c-43df-8887-6c35257bbc58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1921.262135] env[62684]: DEBUG oslo_vmware.api [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': 
session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52162562-a258-e358-58e4-715c63f16c49, 'name': SearchDatastore_Task, 'duration_secs': 0.04324} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1921.262135] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1921.262251] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1921.262464] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1921.262613] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1921.263666] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1921.263666] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e5bf6199-00ef-48e7-a417-15c64b972c1d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.272640] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1921.273020] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1921.276084] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b0ceb06-d2f7-486c-b048-23518b03e6a9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.281948] env[62684]: DEBUG oslo_vmware.api [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1921.281948] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52023a70-1328-e00b-68e2-45d7bab7bf2d" [ 1921.281948] env[62684]: _type = "Task" [ 1921.281948] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.289711] env[62684]: DEBUG oslo_vmware.api [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52023a70-1328-e00b-68e2-45d7bab7bf2d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.336018] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd86c9b5-6e12-4259-acfe-47213cb678a9 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "interface-4e5152b0-7bac-4dc2-b6c7-6590fa2d5978-57b8942c-9a9b-4d95-bc8c-f4367c62ba17" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1921.336018] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd86c9b5-6e12-4259-acfe-47213cb678a9 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "interface-4e5152b0-7bac-4dc2-b6c7-6590fa2d5978-57b8942c-9a9b-4d95-bc8c-f4367c62ba17" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1921.392288] env[62684]: DEBUG nova.compute.manager [req-cc932d71-63ca-470b-98d2-cbad14973662 req-03646aca-4147-44c7-8e82-ecdbe21caf63 service nova] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Received event network-changed-9e7719ad-6572-41df-9951-9dc91c818b24 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1921.392503] env[62684]: DEBUG nova.compute.manager [req-cc932d71-63ca-470b-98d2-cbad14973662 req-03646aca-4147-44c7-8e82-ecdbe21caf63 service nova] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Refreshing instance network info cache due to event network-changed-9e7719ad-6572-41df-9951-9dc91c818b24. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1921.392728] env[62684]: DEBUG oslo_concurrency.lockutils [req-cc932d71-63ca-470b-98d2-cbad14973662 req-03646aca-4147-44c7-8e82-ecdbe21caf63 service nova] Acquiring lock "refresh_cache-274d214a-4b92-4900-a66c-54baea2a68f8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1921.392881] env[62684]: DEBUG oslo_concurrency.lockutils [req-cc932d71-63ca-470b-98d2-cbad14973662 req-03646aca-4147-44c7-8e82-ecdbe21caf63 service nova] Acquired lock "refresh_cache-274d214a-4b92-4900-a66c-54baea2a68f8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1921.393067] env[62684]: DEBUG nova.network.neutron [req-cc932d71-63ca-470b-98d2-cbad14973662 req-03646aca-4147-44c7-8e82-ecdbe21caf63 service nova] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Refreshing network info cache for port 9e7719ad-6572-41df-9951-9dc91c818b24 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1921.469328] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee57ada0-f2dd-46b9-a391-c36896243efb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.477112] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c1f3ad2-d11b-4036-a0d1-0c4e36ff28b5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.513481] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75be5ca0-59cd-4582-95ea-404ee1bc1cc5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.522018] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecdbcea0-2884-49c5-983f-6d47e25fe90e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.537300] env[62684]: DEBUG nova.compute.provider_tree [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1921.743654] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Releasing lock "refresh_cache-b1f70e39-bf37-4fb8-b95b-653b59bec265" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1921.801983] env[62684]: DEBUG oslo_vmware.api [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': 
session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52023a70-1328-e00b-68e2-45d7bab7bf2d, 'name': SearchDatastore_Task, 'duration_secs': 0.03169} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1921.804105] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee9c797e-4af1-4c74-a6a9-65294787ecdd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.809385] env[62684]: DEBUG oslo_vmware.api [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1921.809385] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e65934-154c-2dd9-8aae-859faba38e7c" [ 1921.809385] env[62684]: _type = "Task" [ 1921.809385] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.820008] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1921.820616] env[62684]: DEBUG oslo_vmware.api [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e65934-154c-2dd9-8aae-859faba38e7c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.837160] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd86c9b5-6e12-4259-acfe-47213cb678a9 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1921.837368] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd86c9b5-6e12-4259-acfe-47213cb678a9 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1921.838517] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dee64d8-e23e-450c-b854-945f5946183e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.860037] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad616247-87de-485f-92ba-f8b17d4bc259 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.889084] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dd86c9b5-6e12-4259-acfe-47213cb678a9 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Reconfiguring VM to detach interface {{(pid=62684) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1921.891864] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-75cf23b2-d516-4b84-a70c-8641a4d9607f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.915318] env[62684]: DEBUG oslo_vmware.api [None req-dd86c9b5-6e12-4259-acfe-47213cb678a9 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 1921.915318] env[62684]: value = "task-2052841" [ 1921.915318] env[62684]: _type = "Task" [ 1921.915318] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.923689] env[62684]: DEBUG oslo_vmware.api [None req-dd86c9b5-6e12-4259-acfe-47213cb678a9 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052841, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.938540] env[62684]: DEBUG nova.virt.hardware [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1921.938860] env[62684]: DEBUG nova.virt.hardware [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1921.939175] env[62684]: DEBUG nova.virt.hardware [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1921.939473] env[62684]: DEBUG nova.virt.hardware [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1921.939646] env[62684]: DEBUG nova.virt.hardware [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1921.939817] env[62684]: DEBUG nova.virt.hardware [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1921.940062] env[62684]: DEBUG nova.virt.hardware [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1921.940252] env[62684]: DEBUG nova.virt.hardware [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1921.940453] env[62684]: DEBUG nova.virt.hardware [None 
req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1921.940638] env[62684]: DEBUG nova.virt.hardware [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1921.940829] env[62684]: DEBUG nova.virt.hardware [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1921.941757] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ac8d02-6325-465a-aac0-9595932136d6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.949531] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c341e8db-45c6-4975-9ff8-af01287e067a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.963233] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Instance VIF info [] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1921.969427] env[62684]: DEBUG oslo.service.loopingcall [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1921.969427] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1921.969427] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-02eb1676-7176-464a-a373-1d7f60648ff4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.989869] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1921.989869] env[62684]: value = "task-2052842" [ 1921.989869] env[62684]: _type = "Task" [ 1921.989869] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.000558] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052842, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.045064] env[62684]: DEBUG nova.network.neutron [req-7fe712c5-c06a-41a6-a79b-01ff53ff388c req-5fd9aea8-3680-4917-9962-38796aa54f16 service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Updated VIF entry in instance network info cache for port 8be48385-37eb-4c2e-baf8-404a9aad87de. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1922.048026] env[62684]: DEBUG nova.network.neutron [req-7fe712c5-c06a-41a6-a79b-01ff53ff388c req-5fd9aea8-3680-4917-9962-38796aa54f16 service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Updating instance_info_cache with network_info: [{"id": "8be48385-37eb-4c2e-baf8-404a9aad87de", "address": "fa:16:3e:33:7c:80", "network": {"id": "64494ea7-f6d9-430c-8ac7-e876e763004b", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-2056829508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e57b232a7e7647c7a3b2bca3c096feb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap8be48385-37", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1922.064472] env[62684]: ERROR nova.scheduler.client.report [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [req-533b12b8-7b25-45f8-82ee-55448fab04d7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-533b12b8-7b25-45f8-82ee-55448fab04d7"}]} [ 1922.086617] env[62684]: DEBUG nova.scheduler.client.report [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1922.107058] env[62684]: DEBUG nova.scheduler.client.report [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1922.107369] env[62684]: DEBUG nova.compute.provider_tree [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1922.123379] env[62684]: DEBUG nova.scheduler.client.report [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1922.157133] env[62684]: DEBUG nova.scheduler.client.report [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1922.306660] env[62684]: DEBUG nova.network.neutron [req-cc932d71-63ca-470b-98d2-cbad14973662 req-03646aca-4147-44c7-8e82-ecdbe21caf63 service nova] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Updated VIF entry in instance network info cache for port 9e7719ad-6572-41df-9951-9dc91c818b24. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1922.307019] env[62684]: DEBUG nova.network.neutron [req-cc932d71-63ca-470b-98d2-cbad14973662 req-03646aca-4147-44c7-8e82-ecdbe21caf63 service nova] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Updating instance_info_cache with network_info: [{"id": "9e7719ad-6572-41df-9951-9dc91c818b24", "address": "fa:16:3e:b8:67:f7", "network": {"id": "2fa98fa4-ff7c-44e6-add0-693f55fd4b03", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2019954029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7855def9d0aa49abb7003ee504b9ccaf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e7719ad-65", "ovs_interfaceid": "9e7719ad-6572-41df-9951-9dc91c818b24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1922.308393] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1922.308650] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c705c26d-9527-49f2-a981-cfff03aa8ffd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.323418] env[62684]: DEBUG oslo_vmware.api [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e65934-154c-2dd9-8aae-859faba38e7c, 'name': SearchDatastore_Task, 'duration_secs': 0.00943} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.324709] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1922.324988] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 274d214a-4b92-4900-a66c-54baea2a68f8/274d214a-4b92-4900-a66c-54baea2a68f8.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1922.325324] env[62684]: DEBUG oslo_vmware.api [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1922.325324] env[62684]: value = "task-2052843" [ 1922.325324] env[62684]: _type = "Task" [ 1922.325324] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.327861] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4bb2acdc-9f30-412a-a0e2-8b74c5b3b13e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.338723] env[62684]: DEBUG oslo_vmware.api [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052843, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.340177] env[62684]: DEBUG oslo_vmware.api [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1922.340177] env[62684]: value = "task-2052844" [ 1922.340177] env[62684]: _type = "Task" [ 1922.340177] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.352535] env[62684]: DEBUG oslo_vmware.api [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052844, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.434393] env[62684]: DEBUG oslo_vmware.api [None req-dd86c9b5-6e12-4259-acfe-47213cb678a9 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052841, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.504040] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052842, 'name': CreateVM_Task, 'duration_secs': 0.251272} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.504040] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1922.505739] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1922.505739] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1922.505739] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1922.505739] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3622b16-cbd4-4214-8c19-66d1791eac1a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.510899] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Waiting for the task: (returnval){ [ 1922.510899] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5246f8f7-a936-ec68-f63e-f37acfa33380" [ 1922.510899] env[62684]: _type = "Task" [ 1922.510899] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.522224] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5246f8f7-a936-ec68-f63e-f37acfa33380, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.550194] env[62684]: DEBUG oslo_concurrency.lockutils [req-7fe712c5-c06a-41a6-a79b-01ff53ff388c req-5fd9aea8-3680-4917-9962-38796aa54f16 service nova] Releasing lock "refresh_cache-b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1922.690312] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb6c7bb6-60c8-4d98-9cb5-6cf4f0534aa4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.698650] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de56ddd1-cc3c-4b55-baa5-4719aab2ebae {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.731742] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquiring lock "c6dc5401-f59e-4c18-9553-1240e2f49bce" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1922.732027] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "c6dc5401-f59e-4c18-9553-1240e2f49bce" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1922.732251] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquiring lock "c6dc5401-f59e-4c18-9553-1240e2f49bce-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1922.732441] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "c6dc5401-f59e-4c18-9553-1240e2f49bce-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1922.732674] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "c6dc5401-f59e-4c18-9553-1240e2f49bce-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1922.735290] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b919dc-74a5-466d-8a53-1254d07aeeb1 {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.738478] env[62684]: INFO nova.compute.manager [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Terminating instance [ 1922.740863] env[62684]: DEBUG nova.compute.manager [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1922.741091] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1922.741931] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90156922-b5f0-492d-b344-2d1c9c1fbce9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.748878] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fdae74b-b5dd-4e4d-b884-12a045a9f612 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.755937] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1922.756730] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0200e47c-bae7-4a0b-a907-470a656171c3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.766688] env[62684]: DEBUG nova.compute.provider_tree [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1922.776507] env[62684]: DEBUG oslo_vmware.api [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1922.776507] env[62684]: value = "task-2052845" [ 1922.776507] env[62684]: _type = "Task" [ 1922.776507] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.782324] env[62684]: DEBUG oslo_vmware.api [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052845, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.814370] env[62684]: DEBUG oslo_concurrency.lockutils [req-cc932d71-63ca-470b-98d2-cbad14973662 req-03646aca-4147-44c7-8e82-ecdbe21caf63 service nova] Releasing lock "refresh_cache-274d214a-4b92-4900-a66c-54baea2a68f8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1922.839711] env[62684]: DEBUG oslo_vmware.api [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052843, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.852155] env[62684]: DEBUG oslo_vmware.api [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052844, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.485854} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.852449] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 274d214a-4b92-4900-a66c-54baea2a68f8/274d214a-4b92-4900-a66c-54baea2a68f8.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1922.852949] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1922.852949] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2649cb50-6dcc-4195-aa69-1c9754be957e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.861025] env[62684]: DEBUG oslo_vmware.api [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1922.861025] env[62684]: value = "task-2052846" [ 1922.861025] env[62684]: _type = "Task" [ 1922.861025] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.870514] env[62684]: DEBUG oslo_vmware.api [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052846, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.906028] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquiring lock "5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1922.906028] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1922.906028] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquiring lock "5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1922.906028] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1922.906028] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1922.907520] env[62684]: INFO nova.compute.manager [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Terminating instance [ 1922.914038] env[62684]: DEBUG nova.compute.manager [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1922.914038] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1922.914038] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e9a5e1-5747-48d6-b87c-20b2ed3837a3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.923766] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1922.924696] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fb05a3c2-66c5-4003-a16e-dec2c45cf18d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.931375] env[62684]: DEBUG oslo_vmware.api [None req-dd86c9b5-6e12-4259-acfe-47213cb678a9 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052841, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.937058] env[62684]: DEBUG oslo_vmware.api [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1922.937058] env[62684]: value = "task-2052847" [ 1922.937058] env[62684]: _type = "Task" [ 1922.937058] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.948354] env[62684]: DEBUG oslo_vmware.api [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052847, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.024481] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5246f8f7-a936-ec68-f63e-f37acfa33380, 'name': SearchDatastore_Task, 'duration_secs': 0.064007} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1923.024481] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1923.024481] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1923.024481] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1923.024481] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1923.024759] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1923.024849] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9aa2a050-4c0e-45bd-9601-89b5fcfc4cde {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.033163] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1923.033683] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1923.034585] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fcb774f-ff41-4fbb-90c1-2a1cc5a36736 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.041455] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Waiting for the task: (returnval){ [ 1923.041455] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b506ad-be40-11ab-085c-4bf810836e62" [ 1923.041455] env[62684]: _type = "Task" [ 1923.041455] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.050218] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b506ad-be40-11ab-085c-4bf810836e62, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.296302] env[62684]: DEBUG oslo_vmware.api [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052845, 'name': PowerOffVM_Task, 'duration_secs': 0.218457} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1923.296302] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1923.296302] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1923.299481] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a5823d4e-0ac2-4f18-a823-5b2a4439449e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.343308] env[62684]: DEBUG oslo_vmware.api [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052843, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.369483] env[62684]: DEBUG oslo_vmware.api [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052846, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.116881} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1923.369771] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1923.371119] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d390953-c943-4626-83e6-4f6a4e6a8fcb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.396429] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] 274d214a-4b92-4900-a66c-54baea2a68f8/274d214a-4b92-4900-a66c-54baea2a68f8.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1923.396429] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-369006b5-039b-4173-9b1c-49361063dd2b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.417051] env[62684]: DEBUG oslo_vmware.api [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1923.417051] env[62684]: value = "task-2052849" [ 1923.417051] env[62684]: _type = "Task" [ 1923.417051] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.430839] env[62684]: DEBUG oslo_vmware.api [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052849, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.436367] env[62684]: DEBUG nova.scheduler.client.report [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 81 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1923.436768] env[62684]: DEBUG nova.compute.provider_tree [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 81 to 82 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1923.436984] env[62684]: DEBUG nova.compute.provider_tree [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1923.440507] env[62684]: DEBUG oslo_vmware.api [None req-dd86c9b5-6e12-4259-acfe-47213cb678a9 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052841, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.451745] env[62684]: DEBUG oslo_vmware.api [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052847, 'name': PowerOffVM_Task, 'duration_secs': 0.294468} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1923.453293] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1923.453484] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1923.454630] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1923.454630] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1923.454630] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Deleting the datastore file [datastore1] c6dc5401-f59e-4c18-9553-1240e2f49bce {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1923.454630] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f078f1f5-bae9-4b2a-8da7-68d22f4ecca4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.456323] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1fecaf6b-8d59-4028-9743-c68b49542ef3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.463460] env[62684]: DEBUG oslo_vmware.api [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1923.463460] env[62684]: value = "task-2052850" [ 1923.463460] env[62684]: _type = "Task" [ 1923.463460] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.471944] env[62684]: DEBUG oslo_vmware.api [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052850, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.531762] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1923.532011] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1923.532108] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Deleting the datastore file [datastore2] 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1923.533260] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8b718133-fc21-4375-aa0c-88bb134de8c9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.540756] env[62684]: DEBUG oslo_vmware.api [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for the task: (returnval){ [ 1923.540756] env[62684]: value = "task-2052852" [ 1923.540756] env[62684]: _type = "Task" [ 1923.540756] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.556416] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b506ad-be40-11ab-085c-4bf810836e62, 'name': SearchDatastore_Task, 'duration_secs': 0.009704} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1923.559423] env[62684]: DEBUG oslo_vmware.api [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052852, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.559927] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be1f5424-8f5f-4262-a3cc-38ba6b382476 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.565599] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Waiting for the task: (returnval){ [ 1923.565599] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e43e65-c2d2-d5c6-f858-3d99ae96577a" [ 1923.565599] env[62684]: _type = "Task" [ 1923.565599] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.575312] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e43e65-c2d2-d5c6-f858-3d99ae96577a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.841729] env[62684]: DEBUG oslo_vmware.api [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052843, 'name': PowerOffVM_Task, 'duration_secs': 1.09893} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1923.842065] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1923.842842] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75119929-73b6-4779-9fcd-54c55a9fefd6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.864138] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a2df37-209b-46da-a1db-3088d31de013 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.898393] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1923.898719] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b69f965d-87e2-4e6b-a6bf-e18ef83e3ed8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.905561] env[62684]: DEBUG oslo_vmware.api [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1923.905561] env[62684]: value = "task-2052853" [ 1923.905561] env[62684]: _type = "Task" [ 1923.905561] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.913700] env[62684]: DEBUG oslo_vmware.api [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052853, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.928409] env[62684]: DEBUG oslo_vmware.api [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052849, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.931452] env[62684]: DEBUG oslo_vmware.api [None req-dd86c9b5-6e12-4259-acfe-47213cb678a9 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052841, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.942860] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.996s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1923.944221] env[62684]: DEBUG nova.compute.manager [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1923.947133] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.455s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1923.947133] env[62684]: DEBUG nova.objects.instance [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Lazy-loading 'resources' on Instance uuid f44b2e88-af6d-4252-b562-9d5fa7745b56 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1923.974699] env[62684]: DEBUG oslo_vmware.api [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052850, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154896} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1923.975039] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1923.975333] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1923.975520] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1923.975730] env[62684]: INFO nova.compute.manager [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1923.976200] env[62684]: DEBUG oslo.service.loopingcall [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1923.976350] env[62684]: DEBUG nova.compute.manager [-] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1923.976415] env[62684]: DEBUG nova.network.neutron [-] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1924.056475] env[62684]: DEBUG oslo_vmware.api [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Task: {'id': task-2052852, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146797} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1924.056989] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1924.057276] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1924.057437] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1924.057623] env[62684]: INFO nova.compute.manager [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1924.057866] env[62684]: DEBUG oslo.service.loopingcall [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1924.058105] env[62684]: DEBUG nova.compute.manager [-] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1924.058173] env[62684]: DEBUG nova.network.neutron [-] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1924.075691] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e43e65-c2d2-d5c6-f858-3d99ae96577a, 'name': SearchDatastore_Task, 'duration_secs': 0.018196} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1924.076036] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1924.076330] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 8449f09b-4e7b-4511-bb3c-2ff6667addb2/8449f09b-4e7b-4511-bb3c-2ff6667addb2.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1924.076602] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1116a61f-42cb-4729-8a0d-a07c83f5f8e2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.085433] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Waiting for the task: (returnval){ [ 1924.085433] env[62684]: value = "task-2052854" [ 1924.085433] env[62684]: _type = "Task" [ 1924.085433] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1924.096929] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052854, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.333613] env[62684]: DEBUG nova.compute.manager [req-63d8efe7-a282-4c7e-8400-ca3936cdfa2e req-42c09281-7aba-4d2d-9a4b-6c291909abce service nova] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Received event network-vif-deleted-c4406072-51a1-483b-89d6-d1b7ed992955 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1924.333874] env[62684]: INFO nova.compute.manager [req-63d8efe7-a282-4c7e-8400-ca3936cdfa2e req-42c09281-7aba-4d2d-9a4b-6c291909abce service nova] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Neutron deleted interface c4406072-51a1-483b-89d6-d1b7ed992955; detaching it from the instance and deleting it from the info cache [ 1924.334112] env[62684]: DEBUG nova.network.neutron [req-63d8efe7-a282-4c7e-8400-ca3936cdfa2e req-42c09281-7aba-4d2d-9a4b-6c291909abce service nova] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1924.427643] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] VM already powered off {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1924.428326] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1924.428326] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1924.428326] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1924.428531] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1924.429106] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-05e628fb-e89c-475f-bf25-fbdc7c8c4dbd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.437543] env[62684]: DEBUG oslo_vmware.api [None 
req-dd86c9b5-6e12-4259-acfe-47213cb678a9 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052841, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.442136] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1924.442136] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1924.442136] env[62684]: DEBUG oslo_vmware.api [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052849, 'name': ReconfigVM_Task, 'duration_secs': 0.687991} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1924.442453] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e4ca1c5-3b5b-422c-a5a1-e7546b5ae2b0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.444763] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Reconfigured VM instance instance-00000032 to attach disk [datastore1] 274d214a-4b92-4900-a66c-54baea2a68f8/274d214a-4b92-4900-a66c-54baea2a68f8.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1924.446584] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5fd36371-fbef-4315-8860-1463a3c8ef51 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.449411] env[62684]: DEBUG nova.compute.manager [req-7149eec7-ed53-4b66-9554-5e374ef28023 req-b48219a7-f48c-4bf1-b697-105d5b6f43ff service nova] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Received event network-vif-deleted-be474f46-e2b0-4e78-af9e-c06c7e91756e {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1924.449411] env[62684]: INFO nova.compute.manager [req-7149eec7-ed53-4b66-9554-5e374ef28023 req-b48219a7-f48c-4bf1-b697-105d5b6f43ff service nova] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Neutron deleted interface be474f46-e2b0-4e78-af9e-c06c7e91756e; detaching it from the instance and deleting it from the info cache [ 1924.449777] env[62684]: DEBUG nova.network.neutron [req-7149eec7-ed53-4b66-9554-5e374ef28023 req-b48219a7-f48c-4bf1-b697-105d5b6f43ff service nova] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1924.454616] env[62684]: DEBUG nova.compute.utils 
[None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1924.460834] env[62684]: DEBUG nova.compute.manager [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1924.461015] env[62684]: DEBUG nova.network.neutron [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1924.463027] env[62684]: DEBUG oslo_vmware.api [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1924.463027] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52233f0a-e361-f116-1ad8-c4f4e021a9cd" [ 1924.463027] env[62684]: _type = "Task" [ 1924.463027] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1924.468333] env[62684]: DEBUG oslo_vmware.api [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1924.468333] env[62684]: value = "task-2052855" [ 1924.468333] env[62684]: _type = "Task" [ 1924.468333] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1924.478016] env[62684]: DEBUG oslo_vmware.api [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52233f0a-e361-f116-1ad8-c4f4e021a9cd, 'name': SearchDatastore_Task, 'duration_secs': 0.010412} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1924.479617] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a46b1873-1d64-4448-a43c-d172bdf7c926 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.485373] env[62684]: DEBUG oslo_vmware.api [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052855, 'name': Rename_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.492260] env[62684]: DEBUG oslo_vmware.api [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1924.492260] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f7bf78-14d2-5c29-8aac-593da7816f14" [ 1924.492260] env[62684]: _type = "Task" [ 1924.492260] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1924.500932] env[62684]: DEBUG oslo_vmware.api [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f7bf78-14d2-5c29-8aac-593da7816f14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.551211] env[62684]: DEBUG nova.policy [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4f9ace4d78b94a3db9eb74236fca1e6a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aef5d7061c834332b9f9c5c75596bf08', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1924.597014] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052854, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.804873] env[62684]: DEBUG nova.network.neutron [-] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1924.838103] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-44944333-341c-4f8b-87db-d3ccce891645 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.852822] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7217fc82-9c35-4de5-9c49-e6251aa27642 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.895578] env[62684]: DEBUG nova.network.neutron [-] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1924.899748] env[62684]: DEBUG nova.compute.manager [req-63d8efe7-a282-4c7e-8400-ca3936cdfa2e req-42c09281-7aba-4d2d-9a4b-6c291909abce service nova] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Detach interface failed, port_id=c4406072-51a1-483b-89d6-d1b7ed992955, reason: Instance 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7 could not be found. 
{{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1924.922176] env[62684]: DEBUG nova.network.neutron [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Successfully created port: c0c87a54-475a-48ca-96cc-988f06008d07 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1924.934645] env[62684]: DEBUG oslo_vmware.api [None req-dd86c9b5-6e12-4259-acfe-47213cb678a9 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052841, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.962432] env[62684]: DEBUG nova.compute.manager [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1924.966838] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-34799793-03f3-4d5c-bc57-d52748f4e459 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.981141] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37fcc90a-4e18-48c7-9004-5b159226b719 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.006517] env[62684]: DEBUG oslo_vmware.api [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052855, 'name': Rename_Task, 'duration_secs': 0.155926} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.010519] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1925.010783] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-029d6895-f0de-4716-bbff-7f452871e141 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.018496] env[62684]: DEBUG oslo_vmware.api [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f7bf78-14d2-5c29-8aac-593da7816f14, 'name': SearchDatastore_Task, 'duration_secs': 0.058899} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.027056] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1925.027056] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] b1f70e39-bf37-4fb8-b95b-653b59bec265/3931321c-cb4c-4b87-8d3a-50e05ea01db2-rescue.vmdk. {{(pid=62684) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1925.027979] env[62684]: DEBUG oslo_vmware.api [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1925.027979] env[62684]: value = "task-2052856" [ 1925.027979] env[62684]: _type = "Task" [ 1925.027979] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.040287] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ccb98fe-381f-4e77-80be-dadc5b8f20aa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.044878] env[62684]: DEBUG nova.compute.manager [req-7149eec7-ed53-4b66-9554-5e374ef28023 req-b48219a7-f48c-4bf1-b697-105d5b6f43ff service nova] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Detach interface failed, port_id=be474f46-e2b0-4e78-af9e-c06c7e91756e, reason: Instance c6dc5401-f59e-4c18-9553-1240e2f49bce could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1925.056025] env[62684]: DEBUG oslo_vmware.api [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052856, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.057476] env[62684]: DEBUG oslo_vmware.api [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1925.057476] env[62684]: value = "task-2052857" [ 1925.057476] env[62684]: _type = "Task" [ 1925.057476] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.066131] env[62684]: DEBUG oslo_vmware.api [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052857, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.099225] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052854, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.729386} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.099225] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 8449f09b-4e7b-4511-bb3c-2ff6667addb2/8449f09b-4e7b-4511-bb3c-2ff6667addb2.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1925.099225] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1925.099225] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5974a2e3-8748-43d2-8e34-7298c93ff5d6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.105242] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Waiting for the task: (returnval){ [ 1925.105242] env[62684]: value = "task-2052858" [ 1925.105242] env[62684]: _type = "Task" [ 1925.105242] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.114362] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052858, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.115296] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b5ecd6-05a5-46a7-9561-f25df1036ecf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.122580] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8efc3895-ed43-4171-a5cc-c87391e75aa7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.154950] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc2d2b3-ac6b-490e-a87e-12fa2c67da48 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.163058] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93382ab7-5189-4f2a-b0ee-018e857b1ad9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.177796] env[62684]: DEBUG nova.compute.provider_tree [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1925.308399] env[62684]: INFO nova.compute.manager [-] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Took 1.25 seconds to deallocate network for instance. [ 1925.401853] env[62684]: INFO nova.compute.manager [-] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Took 1.42 seconds to deallocate network for instance. [ 1925.436888] env[62684]: DEBUG oslo_vmware.api [None req-dd86c9b5-6e12-4259-acfe-47213cb678a9 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052841, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.554160] env[62684]: DEBUG oslo_vmware.api [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052856, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.570448] env[62684]: DEBUG oslo_vmware.api [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052857, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.621137] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052858, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.277709} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.621733] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1925.622732] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09677634-ef47-40d7-80df-b1a37da33e6b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.647784] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] 8449f09b-4e7b-4511-bb3c-2ff6667addb2/8449f09b-4e7b-4511-bb3c-2ff6667addb2.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1925.649128] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27329c1f-afee-4a3d-8432-06fdc9341d0f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.668739] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Waiting for the task: (returnval){ [ 1925.668739] env[62684]: value = "task-2052859" [ 1925.668739] env[62684]: _type = "Task" [ 1925.668739] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.678337] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052859, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.714414] env[62684]: DEBUG nova.scheduler.client.report [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 82 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1925.714707] env[62684]: DEBUG nova.compute.provider_tree [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 82 to 83 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1925.714940] env[62684]: DEBUG nova.compute.provider_tree [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1925.822036] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1925.910959] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1925.937433] env[62684]: DEBUG oslo_vmware.api [None req-dd86c9b5-6e12-4259-acfe-47213cb678a9 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052841, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.973347] env[62684]: DEBUG nova.compute.manager [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1926.005695] env[62684]: DEBUG nova.virt.hardware [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1926.006052] env[62684]: DEBUG nova.virt.hardware [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1926.006290] env[62684]: DEBUG nova.virt.hardware [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1926.006538] env[62684]: DEBUG nova.virt.hardware [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1926.006736] env[62684]: DEBUG nova.virt.hardware [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1926.006952] env[62684]: DEBUG nova.virt.hardware [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1926.007296] env[62684]: DEBUG nova.virt.hardware [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1926.007503] env[62684]: DEBUG nova.virt.hardware [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1926.007733] env[62684]: DEBUG nova.virt.hardware [None 
req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1926.007954] env[62684]: DEBUG nova.virt.hardware [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1926.008751] env[62684]: DEBUG nova.virt.hardware [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1926.009691] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5e90ea1-7c78-43a5-ad06-13cffd8cc72d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.018102] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02cf1475-8d7f-4ca6-b957-b79653ab0d14 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.052635] env[62684]: DEBUG oslo_vmware.api [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052856, 'name': PowerOnVM_Task, 'duration_secs': 0.627367} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.052921] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1926.053172] env[62684]: INFO nova.compute.manager [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Took 8.32 seconds to spawn the instance on the hypervisor. [ 1926.053482] env[62684]: DEBUG nova.compute.manager [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1926.054380] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a19370-a3bf-4712-a688-ee7b9ff7a038 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.073352] env[62684]: DEBUG oslo_vmware.api [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052857, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.180069] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052859, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.220212] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.273s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1926.225258] env[62684]: DEBUG oslo_concurrency.lockutils [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.256s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1926.225597] env[62684]: DEBUG nova.objects.instance [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Lazy-loading 'resources' on Instance uuid 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1926.249317] env[62684]: INFO nova.scheduler.client.report [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Deleted allocations for instance f44b2e88-af6d-4252-b562-9d5fa7745b56 [ 1926.438346] env[62684]: DEBUG oslo_vmware.api [None req-dd86c9b5-6e12-4259-acfe-47213cb678a9 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052841, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.473288] env[62684]: DEBUG nova.compute.manager [req-dfcb7866-b732-4d62-8695-4523ba3870ee req-0c64fe62-12d2-442b-ad34-f2f210b21cf2 service nova] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Received event network-vif-plugged-c0c87a54-475a-48ca-96cc-988f06008d07 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1926.473437] env[62684]: DEBUG oslo_concurrency.lockutils [req-dfcb7866-b732-4d62-8695-4523ba3870ee req-0c64fe62-12d2-442b-ad34-f2f210b21cf2 service nova] Acquiring lock "e3dd1bc0-f292-4ac7-a8db-324887a18411-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1926.473731] env[62684]: DEBUG oslo_concurrency.lockutils [req-dfcb7866-b732-4d62-8695-4523ba3870ee req-0c64fe62-12d2-442b-ad34-f2f210b21cf2 service nova] Lock "e3dd1bc0-f292-4ac7-a8db-324887a18411-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1926.473935] env[62684]: DEBUG oslo_concurrency.lockutils [req-dfcb7866-b732-4d62-8695-4523ba3870ee req-0c64fe62-12d2-442b-ad34-f2f210b21cf2 service nova] Lock "e3dd1bc0-f292-4ac7-a8db-324887a18411-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1926.474211] env[62684]: DEBUG nova.compute.manager [req-dfcb7866-b732-4d62-8695-4523ba3870ee req-0c64fe62-12d2-442b-ad34-f2f210b21cf2 service nova] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] No waiting events found dispatching network-vif-plugged-c0c87a54-475a-48ca-96cc-988f06008d07 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1926.474403] env[62684]: WARNING nova.compute.manager [req-dfcb7866-b732-4d62-8695-4523ba3870ee req-0c64fe62-12d2-442b-ad34-f2f210b21cf2 service nova] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Received unexpected event network-vif-plugged-c0c87a54-475a-48ca-96cc-988f06008d07 for instance with vm_state building and task_state spawning. [ 1926.576022] env[62684]: DEBUG oslo_vmware.api [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052857, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.461873} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.576022] env[62684]: INFO nova.virt.vmwareapi.ds_util [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] b1f70e39-bf37-4fb8-b95b-653b59bec265/3931321c-cb4c-4b87-8d3a-50e05ea01db2-rescue.vmdk. 
[ 1926.576022] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f422233-7c1b-4a93-9754-585199e70382 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.579591] env[62684]: INFO nova.compute.manager [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Took 52.88 seconds to build instance. [ 1926.608490] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] b1f70e39-bf37-4fb8-b95b-653b59bec265/3931321c-cb4c-4b87-8d3a-50e05ea01db2-rescue.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1926.610055] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a99fcc77-d26d-4b02-add4-74deda266e15 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.633593] env[62684]: DEBUG oslo_vmware.api [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1926.633593] env[62684]: value = "task-2052860" [ 1926.633593] env[62684]: _type = "Task" [ 1926.633593] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.642175] env[62684]: DEBUG oslo_vmware.api [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052860, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.679509] env[62684]: DEBUG nova.network.neutron [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Successfully updated port: c0c87a54-475a-48ca-96cc-988f06008d07 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1926.689733] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052859, 'name': ReconfigVM_Task, 'duration_secs': 0.966318} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.689733] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Reconfigured VM instance instance-00000031 to attach disk [datastore1] 8449f09b-4e7b-4511-bb3c-2ff6667addb2/8449f09b-4e7b-4511-bb3c-2ff6667addb2.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1926.689733] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e923f019-534a-4c6c-b799-578c32b62248 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.694845] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Waiting for the task: (returnval){ [ 1926.694845] env[62684]: value = "task-2052861" [ 1926.694845] env[62684]: _type = "Task" [ 1926.694845] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.707085] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052861, 'name': Rename_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.757080] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b7d40e89-e492-4d55-a633-7950cda4ce4b tempest-ServersTestJSON-1246500318 tempest-ServersTestJSON-1246500318-project-member] Lock "f44b2e88-af6d-4252-b562-9d5fa7745b56" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.787s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1926.939494] env[62684]: DEBUG oslo_vmware.api [None req-dd86c9b5-6e12-4259-acfe-47213cb678a9 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052841, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.081659] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b98ccfeb-5997-45d3-a05f-0f61e5afc778 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "274d214a-4b92-4900-a66c-54baea2a68f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.815s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1927.146601] env[62684]: DEBUG oslo_vmware.api [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052860, 'name': ReconfigVM_Task, 'duration_secs': 0.253716} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.146889] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Reconfigured VM instance instance-0000002d to attach disk [datastore1] b1f70e39-bf37-4fb8-b95b-653b59bec265/3931321c-cb4c-4b87-8d3a-50e05ea01db2-rescue.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1927.148486] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-696f17b9-8f15-4eb3-ab19-822d58d9395a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.178154] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5197c500-5335-43a5-9b5c-7ffdd947ac31 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.189261] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "refresh_cache-e3dd1bc0-f292-4ac7-a8db-324887a18411" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1927.189261] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquired lock "refresh_cache-e3dd1bc0-f292-4ac7-a8db-324887a18411" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1927.189261] env[62684]: DEBUG nova.network.neutron [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1927.195755] env[62684]: DEBUG oslo_vmware.api [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1927.195755] env[62684]: value = "task-2052862" [ 1927.195755] env[62684]: _type = "Task" [ 1927.195755] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.210831] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052861, 'name': Rename_Task, 'duration_secs': 0.120797} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.215123] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1927.215773] env[62684]: DEBUG oslo_vmware.api [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052862, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.216181] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c1f6944c-0a75-4d99-b992-b1f4cd06bc61 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.223349] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Waiting for the task: (returnval){ [ 1927.223349] env[62684]: value = "task-2052863" [ 1927.223349] env[62684]: _type = "Task" [ 1927.223349] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.231893] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052863, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.248560] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ab33bc-9f8e-4592-9af2-448067b56f5f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.257810] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad2d89cc-e22f-4b78-b34c-596eb0289a31 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.307724] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-558b5c2d-4863-4ec8-ac5e-ec30a6201ede {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.318660] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2115269c-b8e6-443c-8ec7-a6a7435b5401 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.333075] env[62684]: DEBUG nova.compute.provider_tree [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1927.438798] env[62684]: DEBUG oslo_vmware.api [None req-dd86c9b5-6e12-4259-acfe-47213cb678a9 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052841, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.588169] env[62684]: DEBUG nova.compute.manager [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1927.716666] env[62684]: DEBUG oslo_vmware.api [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052862, 'name': ReconfigVM_Task, 'duration_secs': 0.148019} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.716972] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1927.717271] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cfa68c36-be4a-43cf-9588-52a18a3fc57c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.729739] env[62684]: DEBUG oslo_vmware.api [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1927.729739] env[62684]: value = "task-2052864" [ 1927.729739] env[62684]: _type = "Task" [ 1927.729739] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.739959] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052863, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.744117] env[62684]: DEBUG oslo_vmware.api [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052864, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.763374] env[62684]: DEBUG nova.network.neutron [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1927.836666] env[62684]: DEBUG nova.scheduler.client.report [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1927.940992] env[62684]: DEBUG oslo_vmware.api [None req-dd86c9b5-6e12-4259-acfe-47213cb678a9 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052841, 'name': ReconfigVM_Task, 'duration_secs': 5.760754} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.941332] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd86c9b5-6e12-4259-acfe-47213cb678a9 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1927.941591] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dd86c9b5-6e12-4259-acfe-47213cb678a9 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Reconfigured VM to detach interface {{(pid=62684) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1928.004026] env[62684]: DEBUG nova.network.neutron [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Updating instance_info_cache with network_info: [{"id": "c0c87a54-475a-48ca-96cc-988f06008d07", "address": "fa:16:3e:63:f9:9d", "network": {"id": "bca0ee43-bbb1-483b-9d82-56955369f9b7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1592250106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aef5d7061c834332b9f9c5c75596bf08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bfa7abe-7e46-4d8f-b50a-4d0c4509e4dc", "external-id": "nsx-vlan-transportzone-951", "segmentation_id": 951, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0c87a54-47", "ovs_interfaceid": "c0c87a54-475a-48ca-96cc-988f06008d07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1928.121515] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1928.246829] env[62684]: DEBUG oslo_vmware.api [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052863, 'name': PowerOnVM_Task, 'duration_secs': 0.519961} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.247506] env[62684]: DEBUG oslo_vmware.api [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052864, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.247781] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1928.248160] env[62684]: DEBUG nova.compute.manager [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1928.249197] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8447f5fa-4d8a-46a8-98f5-dba33bb815f1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.341764] env[62684]: DEBUG oslo_concurrency.lockutils [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.116s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1928.345143] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.873s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1928.347205] env[62684]: INFO nova.compute.claims [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1928.370471] env[62684]: INFO nova.scheduler.client.report [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Deleted allocations for instance 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b [ 1928.496350] env[62684]: DEBUG nova.compute.manager [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1928.497442] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f40886fe-366e-472c-a73c-257605e6783c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.510294] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Releasing lock "refresh_cache-e3dd1bc0-f292-4ac7-a8db-324887a18411" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
1928.510647] env[62684]: DEBUG nova.compute.manager [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Instance network_info: |[{"id": "c0c87a54-475a-48ca-96cc-988f06008d07", "address": "fa:16:3e:63:f9:9d", "network": {"id": "bca0ee43-bbb1-483b-9d82-56955369f9b7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1592250106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aef5d7061c834332b9f9c5c75596bf08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bfa7abe-7e46-4d8f-b50a-4d0c4509e4dc", "external-id": "nsx-vlan-transportzone-951", "segmentation_id": 951, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0c87a54-47", "ovs_interfaceid": "c0c87a54-475a-48ca-96cc-988f06008d07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1928.512338] env[62684]: DEBUG nova.compute.manager [req-aa25e1c1-fd0a-4ee7-bdae-d6c1bab2f872 req-9da9a889-acee-4a0d-bbc6-4055fa15705c service nova] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Received event network-vif-deleted-57b8942c-9a9b-4d95-bc8c-f4367c62ba17 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1928.512652] env[62684]: INFO nova.compute.manager [req-aa25e1c1-fd0a-4ee7-bdae-d6c1bab2f872 req-9da9a889-acee-4a0d-bbc6-4055fa15705c service nova] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Neutron deleted interface 57b8942c-9a9b-4d95-bc8c-f4367c62ba17; detaching it from the instance and deleting it from the info cache [ 1928.513039] env[62684]: DEBUG nova.network.neutron [req-aa25e1c1-fd0a-4ee7-bdae-d6c1bab2f872 req-9da9a889-acee-4a0d-bbc6-4055fa15705c service nova] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Updating instance_info_cache with network_info: [{"id": "220c5589-f035-4097-8c0d-dfd565a9203a", "address": "fa:16:3e:3e:7c:34", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.235", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap220c5589-f0", "ovs_interfaceid": 
"220c5589-f035-4097-8c0d-dfd565a9203a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1928.515580] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:f9:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bfa7abe-7e46-4d8f-b50a-4d0c4509e4dc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c0c87a54-475a-48ca-96cc-988f06008d07', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1928.525136] env[62684]: DEBUG oslo.service.loopingcall [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1928.527747] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1928.532658] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-18fc9b78-2a59-4039-9479-9708a5362932 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.554418] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1928.554418] env[62684]: value = "task-2052865" [ 1928.554418] env[62684]: _type = "Task" [ 1928.554418] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.567854] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052865, 'name': CreateVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.593161] env[62684]: DEBUG nova.compute.manager [req-310c262b-d7b6-45db-a881-028575d8cf6c req-7af77c5f-c403-4c39-8ee2-53d347e6a1b8 service nova] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Received event network-changed-c0c87a54-475a-48ca-96cc-988f06008d07 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1928.593161] env[62684]: DEBUG nova.compute.manager [req-310c262b-d7b6-45db-a881-028575d8cf6c req-7af77c5f-c403-4c39-8ee2-53d347e6a1b8 service nova] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Refreshing instance network info cache due to event network-changed-c0c87a54-475a-48ca-96cc-988f06008d07. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1928.593161] env[62684]: DEBUG oslo_concurrency.lockutils [req-310c262b-d7b6-45db-a881-028575d8cf6c req-7af77c5f-c403-4c39-8ee2-53d347e6a1b8 service nova] Acquiring lock "refresh_cache-e3dd1bc0-f292-4ac7-a8db-324887a18411" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1928.593161] env[62684]: DEBUG oslo_concurrency.lockutils [req-310c262b-d7b6-45db-a881-028575d8cf6c req-7af77c5f-c403-4c39-8ee2-53d347e6a1b8 service nova] Acquired lock "refresh_cache-e3dd1bc0-f292-4ac7-a8db-324887a18411" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1928.593161] env[62684]: DEBUG nova.network.neutron [req-310c262b-d7b6-45db-a881-028575d8cf6c req-7af77c5f-c403-4c39-8ee2-53d347e6a1b8 service nova] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Refreshing network info cache for port c0c87a54-475a-48ca-96cc-988f06008d07 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1928.746330] env[62684]: DEBUG oslo_vmware.api [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052864, 'name': PowerOnVM_Task, 'duration_secs': 0.607859} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.746809] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1928.751792] env[62684]: DEBUG nova.compute.manager [None req-4dac8e33-c0f4-48df-963d-eab058ffdd68 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1928.753059] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de4a44a-4786-4396-8af6-917eeb80c10d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.773609] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1928.894461] env[62684]: DEBUG oslo_concurrency.lockutils [None req-13248c62-a440-40a9-8864-b504b1dd5c2b tempest-ServersTestBootFromVolume-630331238 tempest-ServersTestBootFromVolume-630331238-project-member] Lock "5bc73032-45f9-4b5c-a4ea-e07c48e4f82b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 50.015s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1929.027576] env[62684]: DEBUG oslo_concurrency.lockutils [req-aa25e1c1-fd0a-4ee7-bdae-d6c1bab2f872 req-9da9a889-acee-4a0d-bbc6-4055fa15705c service nova] 
Acquiring lock "4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1929.027903] env[62684]: DEBUG oslo_concurrency.lockutils [req-aa25e1c1-fd0a-4ee7-bdae-d6c1bab2f872 req-9da9a889-acee-4a0d-bbc6-4055fa15705c service nova] Acquired lock "4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1929.028861] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f5fb8e-55f1-4701-a3a9-08f79c731b80 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.059282] env[62684]: INFO nova.compute.manager [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] instance snapshotting [ 1929.061328] env[62684]: DEBUG oslo_concurrency.lockutils [req-aa25e1c1-fd0a-4ee7-bdae-d6c1bab2f872 req-9da9a889-acee-4a0d-bbc6-4055fa15705c service nova] Releasing lock "4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1929.061634] env[62684]: WARNING nova.compute.manager [req-aa25e1c1-fd0a-4ee7-bdae-d6c1bab2f872 req-9da9a889-acee-4a0d-bbc6-4055fa15705c service nova] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Detach interface failed, port_id=57b8942c-9a9b-4d95-bc8c-f4367c62ba17, reason: No device with interface-id 57b8942c-9a9b-4d95-bc8c-f4367c62ba17 exists on VM: nova.exception.NotFound: No device with interface-id 57b8942c-9a9b-4d95-bc8c-f4367c62ba17 exists on VM [ 1929.068350] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d8ee055-2fe8-4062-a5cb-d1a0a29a03a3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.094748] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dca0554-ef98-4439-9c6b-ba7b6dd341d2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.098099] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052865, 'name': CreateVM_Task, 'duration_secs': 0.516266} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.100251] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1929.101566] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1929.101792] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1929.102155] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1929.102744] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6a68cc6-0c5f-4b8a-bc74-6630d41c7601 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.112776] env[62684]: DEBUG oslo_vmware.api [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1929.112776] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5209054e-2550-eb6b-8b01-ab983fbd2cb7" [ 1929.112776] env[62684]: _type = "Task" [ 1929.112776] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.120804] env[62684]: DEBUG oslo_vmware.api [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5209054e-2550-eb6b-8b01-ab983fbd2cb7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.394424] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd86c9b5-6e12-4259-acfe-47213cb678a9 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "refresh_cache-4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1929.394424] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd86c9b5-6e12-4259-acfe-47213cb678a9 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "refresh_cache-4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1929.394424] env[62684]: DEBUG nova.network.neutron [None req-dd86c9b5-6e12-4259-acfe-47213cb678a9 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1929.613559] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Creating Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1929.615026] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-8939a059-7f29-4caf-a545-753f3942730b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.630079] env[62684]: DEBUG oslo_vmware.api [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5209054e-2550-eb6b-8b01-ab983fbd2cb7, 'name': SearchDatastore_Task, 'duration_secs': 0.009726} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.633976] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1929.634323] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1929.635011] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1929.635011] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1929.635011] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1929.635387] env[62684]: DEBUG oslo_vmware.api [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1929.635387] env[62684]: value = "task-2052866" [ 1929.635387] env[62684]: _type = "Task" [ 1929.635387] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.635808] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-925dbd64-32b4-4309-9769-31106329c118 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.647733] env[62684]: DEBUG oslo_vmware.api [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052866, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.652534] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1929.652534] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1929.653274] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c950f8e-e828-4753-8be0-48f92beb98fa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.658814] env[62684]: DEBUG oslo_vmware.api [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1929.658814] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523018cd-3cfd-c143-af8d-59e9b6f83390" [ 1929.658814] env[62684]: _type = "Task" [ 1929.658814] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.668132] env[62684]: DEBUG oslo_vmware.api [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523018cd-3cfd-c143-af8d-59e9b6f83390, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.845394] env[62684]: DEBUG nova.network.neutron [req-310c262b-d7b6-45db-a881-028575d8cf6c req-7af77c5f-c403-4c39-8ee2-53d347e6a1b8 service nova] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Updated VIF entry in instance network info cache for port c0c87a54-475a-48ca-96cc-988f06008d07. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1929.846454] env[62684]: DEBUG nova.network.neutron [req-310c262b-d7b6-45db-a881-028575d8cf6c req-7af77c5f-c403-4c39-8ee2-53d347e6a1b8 service nova] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Updating instance_info_cache with network_info: [{"id": "c0c87a54-475a-48ca-96cc-988f06008d07", "address": "fa:16:3e:63:f9:9d", "network": {"id": "bca0ee43-bbb1-483b-9d82-56955369f9b7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1592250106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aef5d7061c834332b9f9c5c75596bf08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bfa7abe-7e46-4d8f-b50a-4d0c4509e4dc", "external-id": "nsx-vlan-transportzone-951", "segmentation_id": 951, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0c87a54-47", "ovs_interfaceid": "c0c87a54-475a-48ca-96cc-988f06008d07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1929.914986] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e33e68b-19a4-4d43-96f5-1ed3fd986096 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.922636] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e76a8c72-78a6-42de-af20-70396a8b9adf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.957986] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a78b6c8-e749-4c21-8e57-8d6cc0dd22cf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.965839] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7729f8c2-5a19-4996-9b2e-a7dad01d28d5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.982061] env[62684]: DEBUG nova.compute.provider_tree [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1930.150575] env[62684]: DEBUG oslo_vmware.api [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052866, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.171891] env[62684]: DEBUG oslo_vmware.api [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523018cd-3cfd-c143-af8d-59e9b6f83390, 'name': SearchDatastore_Task, 'duration_secs': 0.014929} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.173208] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3cca614-1d3e-4ccc-99e6-fbb67c9d3044 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.179561] env[62684]: DEBUG oslo_vmware.api [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1930.179561] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52220918-ce7a-9333-39b7-9ed54776897f" [ 1930.179561] env[62684]: _type = "Task" [ 1930.179561] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.189052] env[62684]: DEBUG oslo_vmware.api [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52220918-ce7a-9333-39b7-9ed54776897f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.293168] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Acquiring lock "8449f09b-4e7b-4511-bb3c-2ff6667addb2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1930.293647] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Lock "8449f09b-4e7b-4511-bb3c-2ff6667addb2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1930.294515] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Acquiring lock "8449f09b-4e7b-4511-bb3c-2ff6667addb2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1930.294515] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Lock "8449f09b-4e7b-4511-bb3c-2ff6667addb2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1930.294515] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Lock "8449f09b-4e7b-4511-bb3c-2ff6667addb2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1930.297231] env[62684]: INFO nova.compute.manager [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Terminating instance [ 1930.299045] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Acquiring lock "refresh_cache-8449f09b-4e7b-4511-bb3c-2ff6667addb2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1930.299311] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Acquired lock "refresh_cache-8449f09b-4e7b-4511-bb3c-2ff6667addb2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1930.299610] env[62684]: DEBUG nova.network.neutron [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1930.348618] env[62684]: DEBUG oslo_concurrency.lockutils [req-310c262b-d7b6-45db-a881-028575d8cf6c req-7af77c5f-c403-4c39-8ee2-53d347e6a1b8 service nova] Releasing lock "refresh_cache-e3dd1bc0-f292-4ac7-a8db-324887a18411" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1930.495129] env[62684]: DEBUG nova.scheduler.client.report [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1930.589860] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1930.590230] env[62684]: DEBUG oslo_concurrency.lockutils [None 
req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1930.590515] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "4e5152b0-7bac-4dc2-b6c7-6590fa2d5978-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1930.591403] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "4e5152b0-7bac-4dc2-b6c7-6590fa2d5978-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1930.591833] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "4e5152b0-7bac-4dc2-b6c7-6590fa2d5978-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1930.594580] env[62684]: INFO nova.compute.manager [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Terminating instance [ 1930.597095] env[62684]: DEBUG nova.compute.manager [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1930.597368] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1930.598507] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-223af761-5fc4-4ece-b6a3-359ab196e100 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.608028] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1930.608359] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e37777e3-c149-4928-b7c2-f7d437160b8f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.615844] env[62684]: DEBUG oslo_vmware.api [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 1930.615844] env[62684]: value = "task-2052867" [ 1930.615844] env[62684]: _type = "Task" [ 1930.615844] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.627100] env[62684]: DEBUG oslo_vmware.api [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052867, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.635105] env[62684]: DEBUG nova.network.neutron [None req-dd86c9b5-6e12-4259-acfe-47213cb678a9 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Updating instance_info_cache with network_info: [{"id": "220c5589-f035-4097-8c0d-dfd565a9203a", "address": "fa:16:3e:3e:7c:34", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.235", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap220c5589-f0", "ovs_interfaceid": "220c5589-f035-4097-8c0d-dfd565a9203a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1930.651497] env[62684]: DEBUG oslo_vmware.api [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052866, 'name': CreateSnapshot_Task, 'duration_secs': 0.869823} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.651889] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Created Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1930.653315] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd2402f5-debf-4f4f-8498-130f76212759 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.693042] env[62684]: DEBUG oslo_vmware.api [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52220918-ce7a-9333-39b7-9ed54776897f, 'name': SearchDatastore_Task, 'duration_secs': 0.009805} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.693847] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1930.695779] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] e3dd1bc0-f292-4ac7-a8db-324887a18411/e3dd1bc0-f292-4ac7-a8db-324887a18411.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1930.695779] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3787e273-c701-4780-843e-4f1f0e4e524c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.702716] env[62684]: DEBUG oslo_vmware.api [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1930.702716] env[62684]: value = "task-2052868" [ 1930.702716] env[62684]: _type = "Task" [ 1930.702716] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.715205] env[62684]: DEBUG oslo_vmware.api [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052868, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.849136] env[62684]: DEBUG nova.network.neutron [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1931.006186] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.659s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1931.006186] env[62684]: DEBUG nova.compute.manager [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1931.010106] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.665s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1931.013054] env[62684]: INFO nova.compute.claims [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1931.127650] env[62684]: DEBUG oslo_vmware.api [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052867, 'name': PowerOffVM_Task, 'duration_secs': 0.238143} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.128030] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1931.128628] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1931.128628] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-737c0cd2-ffbb-4c11-a090-d6ca6aee32ee {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.139360] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd86c9b5-6e12-4259-acfe-47213cb678a9 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "refresh_cache-4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1931.174596] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Creating linked-clone VM from snapshot {{(pid=62684) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1931.174825] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ae310411-b936-4e41-aca4-41209a0e7dfb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.184548] env[62684]: DEBUG oslo_vmware.api [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1931.184548] env[62684]: value = "task-2052870" [ 1931.184548] env[62684]: _type = "Task" [ 
1931.184548] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.194919] env[62684]: DEBUG oslo_vmware.api [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052870, 'name': CloneVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.196913] env[62684]: DEBUG nova.network.neutron [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1931.215644] env[62684]: DEBUG oslo_vmware.api [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052868, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476881} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.216995] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] e3dd1bc0-f292-4ac7-a8db-324887a18411/e3dd1bc0-f292-4ac7-a8db-324887a18411.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1931.217239] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1931.217768] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b45e6e3a-2ceb-480d-8075-c65ef0021bbe {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.225913] env[62684]: DEBUG oslo_vmware.api [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1931.225913] env[62684]: value = "task-2052871" [ 1931.225913] env[62684]: _type = "Task" [ 1931.225913] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.232315] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1931.232606] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1931.232847] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Deleting the datastore file [datastore2] 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1931.233120] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8c984738-8069-4a7f-9756-2413aa072032 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.238311] env[62684]: DEBUG oslo_vmware.api [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052871, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.241066] env[62684]: DEBUG oslo_vmware.api [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 1931.241066] env[62684]: value = "task-2052872" [ 1931.241066] env[62684]: _type = "Task" [ 1931.241066] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.249936] env[62684]: DEBUG oslo_vmware.api [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052872, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.526549] env[62684]: DEBUG nova.compute.utils [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1931.528642] env[62684]: DEBUG nova.compute.manager [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1931.528817] env[62684]: DEBUG nova.network.neutron [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1931.632391] env[62684]: DEBUG nova.policy [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '711b8497177c40c697d373ab866e3cb7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72ac36eda47d4c51a4b421c764d0404d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1931.646422] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd86c9b5-6e12-4259-acfe-47213cb678a9 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "interface-4e5152b0-7bac-4dc2-b6c7-6590fa2d5978-57b8942c-9a9b-4d95-bc8c-f4367c62ba17" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.312s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1931.696920] env[62684]: DEBUG oslo_vmware.api [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052870, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.703685] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Releasing lock "refresh_cache-8449f09b-4e7b-4511-bb3c-2ff6667addb2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1931.703685] env[62684]: DEBUG nova.compute.manager [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1931.703685] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1931.703685] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ef34791-0abe-49a1-a4f2-19d365f631b6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.712043] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1931.712043] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9066a67f-7678-49dc-bcba-43f7684602fa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.720110] env[62684]: DEBUG oslo_vmware.api [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Waiting for the task: (returnval){ [ 1931.720110] env[62684]: value = "task-2052873" [ 1931.720110] env[62684]: _type = "Task" [ 1931.720110] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.729216] env[62684]: DEBUG oslo_vmware.api [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052873, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.738992] env[62684]: DEBUG oslo_vmware.api [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052871, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.129304} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.739289] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1931.740307] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb415831-432d-4893-abd8-88837a707b61 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.752132] env[62684]: DEBUG oslo_vmware.api [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2052872, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197736} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.761683] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1931.761897] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1931.762100] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1931.762421] env[62684]: INFO nova.compute.manager [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1931.762743] env[62684]: DEBUG oslo.service.loopingcall [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1931.771476] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] e3dd1bc0-f292-4ac7-a8db-324887a18411/e3dd1bc0-f292-4ac7-a8db-324887a18411.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1931.771749] env[62684]: DEBUG nova.compute.manager [-] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1931.771844] env[62684]: DEBUG nova.network.neutron [-] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1931.773568] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd6fce6b-e0bf-40fd-a802-6c9a46de978e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.795128] env[62684]: DEBUG oslo_vmware.api [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1931.795128] env[62684]: value = "task-2052874" [ 1931.795128] env[62684]: _type = "Task" [ 1931.795128] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.804359] env[62684]: DEBUG oslo_vmware.api [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052874, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.911956] env[62684]: INFO nova.compute.manager [None req-25e26b34-b1e1-40f9-bee1-379cbc994905 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Unrescuing [ 1931.912318] env[62684]: DEBUG oslo_concurrency.lockutils [None req-25e26b34-b1e1-40f9-bee1-379cbc994905 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "refresh_cache-b1f70e39-bf37-4fb8-b95b-653b59bec265" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1931.912480] env[62684]: DEBUG oslo_concurrency.lockutils [None req-25e26b34-b1e1-40f9-bee1-379cbc994905 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquired lock "refresh_cache-b1f70e39-bf37-4fb8-b95b-653b59bec265" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1931.912657] env[62684]: DEBUG nova.network.neutron [None req-25e26b34-b1e1-40f9-bee1-379cbc994905 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1932.033231] env[62684]: DEBUG nova.compute.utils [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1932.201235] env[62684]: DEBUG oslo_vmware.api [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052870, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.231075] env[62684]: DEBUG oslo_vmware.api [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052873, 'name': PowerOffVM_Task, 'duration_secs': 0.424373} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1932.231472] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1932.231781] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1932.231915] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-46ce2cf7-df62-4a31-99c0-08c38fb99e45 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.262369] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1932.262872] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1932.262872] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Deleting the datastore file [datastore1] 8449f09b-4e7b-4511-bb3c-2ff6667addb2 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1932.263055] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-082d2365-0008-4a2a-a739-159bc976363f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.270497] env[62684]: DEBUG oslo_vmware.api [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Waiting for the task: (returnval){ [ 1932.270497] env[62684]: value = "task-2052876" [ 1932.270497] env[62684]: _type = "Task" [ 1932.270497] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.279404] env[62684]: DEBUG oslo_vmware.api [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052876, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.306884] env[62684]: DEBUG oslo_vmware.api [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052874, 'name': ReconfigVM_Task, 'duration_secs': 0.306457} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1932.306884] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Reconfigured VM instance instance-00000033 to attach disk [datastore1] e3dd1bc0-f292-4ac7-a8db-324887a18411/e3dd1bc0-f292-4ac7-a8db-324887a18411.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1932.307405] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5cd12814-d2b1-4a5e-8f13-2605e2e7ccaf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.315049] env[62684]: DEBUG oslo_vmware.api [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1932.315049] env[62684]: value = "task-2052877" [ 1932.315049] env[62684]: _type = "Task" [ 1932.315049] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.332397] env[62684]: DEBUG oslo_vmware.api [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052877, 'name': Rename_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.429467] env[62684]: DEBUG nova.network.neutron [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Successfully created port: cd978df3-9bd9-4010-88e9-d5a4127cf2a9 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1932.537825] env[62684]: DEBUG nova.compute.manager [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1932.585246] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a624a2df-7841-4f1b-afd8-882439f35fdf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.594180] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f5c84c-db28-4aa8-8986-28fbbe827b0e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.628735] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e7a0dd9-835b-4f8f-b40a-f1c8661664a4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.637031] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-508e956d-3ca6-41ff-ad6f-1e4ad6b42aca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.654432] env[62684]: DEBUG nova.compute.provider_tree [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1932.697174] env[62684]: DEBUG oslo_vmware.api [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052870, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.789815] env[62684]: DEBUG oslo_vmware.api [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Task: {'id': task-2052876, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109766} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1932.789815] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1932.789815] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1932.789815] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1932.789815] env[62684]: INFO nova.compute.manager [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1932.789815] env[62684]: DEBUG oslo.service.loopingcall [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1932.789815] env[62684]: DEBUG nova.compute.manager [-] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1932.789815] env[62684]: DEBUG nova.network.neutron [-] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1932.834723] env[62684]: DEBUG oslo_vmware.api [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052877, 'name': Rename_Task, 'duration_secs': 0.162541} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1932.835061] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1932.835341] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d2428b93-6d2d-4466-92fc-b9127d8f1ba0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.842137] env[62684]: DEBUG oslo_vmware.api [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1932.842137] env[62684]: value = "task-2052878" [ 1932.842137] env[62684]: _type = "Task" [ 1932.842137] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.851028] env[62684]: DEBUG oslo_vmware.api [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052878, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.853485] env[62684]: DEBUG nova.network.neutron [-] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1932.964202] env[62684]: DEBUG nova.network.neutron [None req-25e26b34-b1e1-40f9-bee1-379cbc994905 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Updating instance_info_cache with network_info: [{"id": "35a0f9ef-b68c-43df-8887-6c35257bbc58", "address": "fa:16:3e:7a:c4:6c", "network": {"id": "bd253713-4e81-4c94-9689-22b81e7f51b6", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-307001665-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd812751722143fabedfa986a2d98b59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35a0f9ef-b6", "ovs_interfaceid": "35a0f9ef-b68c-43df-8887-6c35257bbc58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1933.068210] env[62684]: DEBUG nova.compute.manager [req-2a63125a-ce6c-475f-a1e5-3059952a1415 req-484bfeb6-ecae-4669-8897-d80e86a84df2 service nova] [instance: 
4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Received event network-vif-deleted-220c5589-f035-4097-8c0d-dfd565a9203a {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1933.068313] env[62684]: INFO nova.compute.manager [req-2a63125a-ce6c-475f-a1e5-3059952a1415 req-484bfeb6-ecae-4669-8897-d80e86a84df2 service nova] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Neutron deleted interface 220c5589-f035-4097-8c0d-dfd565a9203a; detaching it from the instance and deleting it from the info cache [ 1933.068492] env[62684]: DEBUG nova.network.neutron [req-2a63125a-ce6c-475f-a1e5-3059952a1415 req-484bfeb6-ecae-4669-8897-d80e86a84df2 service nova] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1933.160194] env[62684]: DEBUG nova.scheduler.client.report [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1933.199341] env[62684]: DEBUG oslo_vmware.api [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052870, 'name': CloneVM_Task, 'duration_secs': 1.981112} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.199518] env[62684]: INFO nova.virt.vmwareapi.vmops [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Created linked-clone VM from snapshot [ 1933.200509] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abd0731a-0a6e-49d7-9786-46f2a051c809 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.210295] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Uploading image 0a70b1eb-e88e-4218-abd5-edf1113ecf05 {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1933.246120] env[62684]: DEBUG oslo_vmware.rw_handles [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1933.246120] env[62684]: value = "vm-421270" [ 1933.246120] env[62684]: _type = "VirtualMachine" [ 1933.246120] env[62684]: }. 
{{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1933.246432] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d0a893a3-c564-4b86-9403-4d32e4a1ac12 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.255399] env[62684]: DEBUG oslo_vmware.rw_handles [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lease: (returnval){ [ 1933.255399] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c47ca3-9e3c-3431-2544-d38420a6f011" [ 1933.255399] env[62684]: _type = "HttpNfcLease" [ 1933.255399] env[62684]: } obtained for exporting VM: (result){ [ 1933.255399] env[62684]: value = "vm-421270" [ 1933.255399] env[62684]: _type = "VirtualMachine" [ 1933.255399] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1933.255399] env[62684]: DEBUG oslo_vmware.api [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the lease: (returnval){ [ 1933.255399] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c47ca3-9e3c-3431-2544-d38420a6f011" [ 1933.255399] env[62684]: _type = "HttpNfcLease" [ 1933.255399] env[62684]: } to be ready. {{(pid=62684) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1933.262428] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1933.262428] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c47ca3-9e3c-3431-2544-d38420a6f011" [ 1933.262428] env[62684]: _type = "HttpNfcLease" [ 1933.262428] env[62684]: } is initializing. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1933.298230] env[62684]: DEBUG nova.network.neutron [-] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1933.356915] env[62684]: DEBUG nova.network.neutron [-] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1933.359110] env[62684]: DEBUG oslo_vmware.api [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052878, 'name': PowerOnVM_Task, 'duration_secs': 0.458639} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.359606] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1933.359835] env[62684]: INFO nova.compute.manager [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Took 7.39 seconds to spawn the instance on the hypervisor. 
[ 1933.360023] env[62684]: DEBUG nova.compute.manager [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1933.360840] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dec48a7-72ff-490c-bcf2-d6d792e3e061 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.469772] env[62684]: DEBUG oslo_concurrency.lockutils [None req-25e26b34-b1e1-40f9-bee1-379cbc994905 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Releasing lock "refresh_cache-b1f70e39-bf37-4fb8-b95b-653b59bec265" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1933.470541] env[62684]: DEBUG nova.objects.instance [None req-25e26b34-b1e1-40f9-bee1-379cbc994905 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lazy-loading 'flavor' on Instance uuid b1f70e39-bf37-4fb8-b95b-653b59bec265 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1933.548633] env[62684]: DEBUG nova.compute.manager [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1933.576676] env[62684]: DEBUG nova.virt.hardware [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:45:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='297537872',id=19,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-2095684475',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1933.576960] env[62684]: DEBUG nova.virt.hardware [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1933.577176] env[62684]: DEBUG nova.virt.hardware [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 1933.577388] env[62684]: DEBUG nova.virt.hardware [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1933.578205] env[62684]: DEBUG nova.virt.hardware [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1933.578205] env[62684]: DEBUG nova.virt.hardware [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1933.578205] env[62684]: DEBUG nova.virt.hardware [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1933.578205] env[62684]: DEBUG nova.virt.hardware [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1933.578436] env[62684]: DEBUG nova.virt.hardware [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1933.578436] env[62684]: DEBUG nova.virt.hardware [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1933.578795] env[62684]: DEBUG nova.virt.hardware [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1933.579796] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41decd1e-88f0-4cb4-9949-268a4ae7d366 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.582880] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6492d48c-0550-45fa-ae0a-d63bb2219fee {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.591658] env[62684]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12036ebf-dd4b-4a86-ac36-681b85216031 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.600274] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a8bb2a3-d644-4f06-a511-8a2690fb9a2b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.636477] env[62684]: DEBUG nova.compute.manager [req-2a63125a-ce6c-475f-a1e5-3059952a1415 req-484bfeb6-ecae-4669-8897-d80e86a84df2 service nova] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Detach interface failed, port_id=220c5589-f035-4097-8c0d-dfd565a9203a, reason: Instance 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978 could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1933.667145] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.657s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1933.667301] env[62684]: DEBUG nova.compute.manager [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1933.670662] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 45.126s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1933.763942] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1933.763942] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c47ca3-9e3c-3431-2544-d38420a6f011" [ 1933.763942] env[62684]: _type = "HttpNfcLease" [ 1933.763942] env[62684]: } is ready. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1933.764229] env[62684]: DEBUG oslo_vmware.rw_handles [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1933.764229] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c47ca3-9e3c-3431-2544-d38420a6f011" [ 1933.764229] env[62684]: _type = "HttpNfcLease" [ 1933.764229] env[62684]: }. 
{{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1933.765041] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7141448c-e6db-4dff-9442-416c5a090f1f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.773851] env[62684]: DEBUG oslo_vmware.rw_handles [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524b79ae-d98b-d000-d6b1-43e7b66adc6b/disk-0.vmdk from lease info. {{(pid=62684) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1933.774031] env[62684]: DEBUG oslo_vmware.rw_handles [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524b79ae-d98b-d000-d6b1-43e7b66adc6b/disk-0.vmdk for reading. {{(pid=62684) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1933.840237] env[62684]: INFO nova.compute.manager [-] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Took 2.07 seconds to deallocate network for instance. [ 1933.860770] env[62684]: INFO nova.compute.manager [-] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Took 1.07 seconds to deallocate network for instance. [ 1933.890595] env[62684]: INFO nova.compute.manager [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Took 56.48 seconds to build instance. [ 1933.897761] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f290441b-fb86-4d35-9f9a-9615bea802bc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.975966] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e33f8a3-08c4-444c-a271-29ff30e204d8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.000794] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-25e26b34-b1e1-40f9-bee1-379cbc994905 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1934.002511] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-766a13de-a57e-4daf-ac6a-19979c371665 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.008874] env[62684]: DEBUG oslo_vmware.api [None req-25e26b34-b1e1-40f9-bee1-379cbc994905 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1934.008874] env[62684]: value = "task-2052880" [ 1934.008874] env[62684]: _type = "Task" [ 1934.008874] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1934.018100] env[62684]: DEBUG oslo_vmware.api [None req-25e26b34-b1e1-40f9-bee1-379cbc994905 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052880, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.189264] env[62684]: DEBUG nova.compute.utils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1934.190824] env[62684]: DEBUG nova.compute.manager [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1934.191023] env[62684]: DEBUG nova.network.neutron [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1934.279742] env[62684]: DEBUG nova.policy [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5892ca114a9d4a6e95c0498c2fc7f2ba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61c83953d09c4d1c97eee5a8679c30d4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1934.353838] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1934.367284] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1934.392542] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ce5c1720-9b0f-4987-9574-a6b7a22559ee tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "e3dd1bc0-f292-4ac7-a8db-324887a18411" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.125s {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.459507] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b0c2ce32-d48f-4ad5-a3f9-33213438696b tempest-ServersAdminTestJSON-1140288753 tempest-ServersAdminTestJSON-1140288753-project-admin] Acquiring lock "refresh_cache-e3dd1bc0-f292-4ac7-a8db-324887a18411" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1934.460814] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b0c2ce32-d48f-4ad5-a3f9-33213438696b tempest-ServersAdminTestJSON-1140288753 tempest-ServersAdminTestJSON-1140288753-project-admin] Acquired lock "refresh_cache-e3dd1bc0-f292-4ac7-a8db-324887a18411" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1934.461238] env[62684]: DEBUG nova.network.neutron [None req-b0c2ce32-d48f-4ad5-a3f9-33213438696b tempest-ServersAdminTestJSON-1140288753 tempest-ServersAdminTestJSON-1140288753-project-admin] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1934.518934] env[62684]: DEBUG oslo_vmware.api [None req-25e26b34-b1e1-40f9-bee1-379cbc994905 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052880, 'name': PowerOffVM_Task, 'duration_secs': 0.247055} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1934.519505] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-25e26b34-b1e1-40f9-bee1-379cbc994905 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1934.525204] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-25e26b34-b1e1-40f9-bee1-379cbc994905 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Reconfiguring VM instance instance-0000002d to detach disk 2001 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1934.525545] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-29d90fda-3bb7-4b91-801d-1c1e858b7b48 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.546532] env[62684]: DEBUG oslo_vmware.api [None req-25e26b34-b1e1-40f9-bee1-379cbc994905 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1934.546532] env[62684]: value = "task-2052881" [ 1934.546532] env[62684]: _type = "Task" [ 1934.546532] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1934.560018] env[62684]: DEBUG oslo_vmware.api [None req-25e26b34-b1e1-40f9-bee1-379cbc994905 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052881, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.630394] env[62684]: DEBUG nova.network.neutron [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Successfully created port: 56087696-2064-4dae-a727-8e8e32b7bb65 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1934.695640] env[62684]: INFO nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Updating resource usage from migration 079cb72a-143d-49f7-91bd-12a1ad5c9e3e [ 1934.700269] env[62684]: DEBUG nova.compute.manager [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1934.732411] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance effc673a-103f-413b-88ac-6907ad1ee852 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1934.733390] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance dcb0a5b2-379e-44ff-a9b0-be615943c94e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1934.733390] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 06751c34-0724-44ba-a263-ad27fcf2920f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1934.733390] env[62684]: WARNING nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 0dbd52ac-c987-4728-974e-73e99465c5e7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1934.733390] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 0676806b-c1f0-4c1a-a12d-add2edf1588f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1934.733636] env[62684]: WARNING nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance ab2c7cbe-6f46-4174-bffb-055a15f2d56b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1934.733636] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1934.734304] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 4a15d298-115f-4132-8be0-00e623fa21d8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1934.734304] env[62684]: WARNING nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance c6dc5401-f59e-4c18-9553-1240e2f49bce is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1934.734304] env[62684]: WARNING nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1934.734572] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 025dfe36-1f14-4bda-84a0-d424364b745b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1934.734698] env[62684]: WARNING nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance b945f05d-ef1c-4469-9390-f7bbd4f435f0 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1934.734895] env[62684]: WARNING nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 50bc9674-d19c-40f1-a89f-1738a1e48307 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1934.735099] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1934.735334] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance b1f70e39-bf37-4fb8-b95b-653b59bec265 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1934.735520] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 26303c0e-be87-41ff-a15c-e92f91f8a05f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1934.736069] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance ca3d1a73-6f3b-4278-8fe7-03b66f407ba6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1934.736069] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 8449f09b-4e7b-4511-bb3c-2ff6667addb2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1934.736172] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 274d214a-4b92-4900-a66c-54baea2a68f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1934.736388] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance e3dd1bc0-f292-4ac7-a8db-324887a18411 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1934.736480] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance feca8680-4baa-4b2c-9875-69a88b351dc0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1934.736629] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance b009f710-1a94-4113-8feb-7cc5dd6a6519 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1934.839958] env[62684]: DEBUG nova.compute.manager [req-1fc27b6a-062e-4689-a6d4-392ff7b43ad1 req-0fff5146-6ad3-4f1c-a55b-6da0f2a24bc6 service nova] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Received event network-vif-plugged-cd978df3-9bd9-4010-88e9-d5a4127cf2a9 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1934.840217] env[62684]: DEBUG oslo_concurrency.lockutils [req-1fc27b6a-062e-4689-a6d4-392ff7b43ad1 req-0fff5146-6ad3-4f1c-a55b-6da0f2a24bc6 service nova] Acquiring lock "feca8680-4baa-4b2c-9875-69a88b351dc0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1934.840664] env[62684]: DEBUG oslo_concurrency.lockutils [req-1fc27b6a-062e-4689-a6d4-392ff7b43ad1 req-0fff5146-6ad3-4f1c-a55b-6da0f2a24bc6 service nova] Lock "feca8680-4baa-4b2c-9875-69a88b351dc0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1934.840664] env[62684]: DEBUG oslo_concurrency.lockutils [req-1fc27b6a-062e-4689-a6d4-392ff7b43ad1 req-0fff5146-6ad3-4f1c-a55b-6da0f2a24bc6 service nova] Lock "feca8680-4baa-4b2c-9875-69a88b351dc0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.840785] env[62684]: DEBUG nova.compute.manager [req-1fc27b6a-062e-4689-a6d4-392ff7b43ad1 req-0fff5146-6ad3-4f1c-a55b-6da0f2a24bc6 service nova] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] No waiting events found dispatching network-vif-plugged-cd978df3-9bd9-4010-88e9-d5a4127cf2a9 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1934.840920] env[62684]: WARNING nova.compute.manager [req-1fc27b6a-062e-4689-a6d4-392ff7b43ad1 req-0fff5146-6ad3-4f1c-a55b-6da0f2a24bc6 service nova] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Received unexpected event network-vif-plugged-cd978df3-9bd9-4010-88e9-d5a4127cf2a9 for instance with vm_state building and task_state spawning. [ 1935.058715] env[62684]: DEBUG oslo_vmware.api [None req-25e26b34-b1e1-40f9-bee1-379cbc994905 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052881, 'name': ReconfigVM_Task, 'duration_secs': 0.413715} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1935.058715] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-25e26b34-b1e1-40f9-bee1-379cbc994905 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Reconfigured VM instance instance-0000002d to detach disk 2001 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1935.058715] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-25e26b34-b1e1-40f9-bee1-379cbc994905 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1935.058715] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e8c0514b-e1f2-4636-a0ec-f609403defa1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.067968] env[62684]: DEBUG oslo_vmware.api [None req-25e26b34-b1e1-40f9-bee1-379cbc994905 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1935.067968] env[62684]: value = "task-2052882" [ 1935.067968] env[62684]: _type = "Task" [ 1935.067968] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.079195] env[62684]: DEBUG oslo_vmware.api [None req-25e26b34-b1e1-40f9-bee1-379cbc994905 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052882, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.096537] env[62684]: DEBUG nova.network.neutron [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Successfully updated port: cd978df3-9bd9-4010-88e9-d5a4127cf2a9 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1935.243081] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance df93c57e-716c-4c73-b551-9079a523ea0b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1935.575541] env[62684]: DEBUG oslo_vmware.api [None req-25e26b34-b1e1-40f9-bee1-379cbc994905 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052882, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.600922] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Acquiring lock "refresh_cache-feca8680-4baa-4b2c-9875-69a88b351dc0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1935.601225] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Acquired lock "refresh_cache-feca8680-4baa-4b2c-9875-69a88b351dc0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1935.601436] env[62684]: DEBUG nova.network.neutron [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1935.713095] env[62684]: DEBUG nova.compute.manager [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1935.738453] env[62684]: DEBUG nova.network.neutron [None req-b0c2ce32-d48f-4ad5-a3f9-33213438696b tempest-ServersAdminTestJSON-1140288753 tempest-ServersAdminTestJSON-1140288753-project-admin] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Updating instance_info_cache with network_info: [{"id": "c0c87a54-475a-48ca-96cc-988f06008d07", "address": "fa:16:3e:63:f9:9d", "network": {"id": "bca0ee43-bbb1-483b-9d82-56955369f9b7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1592250106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aef5d7061c834332b9f9c5c75596bf08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bfa7abe-7e46-4d8f-b50a-4d0c4509e4dc", "external-id": "nsx-vlan-transportzone-951", "segmentation_id": 951, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0c87a54-47", "ovs_interfaceid": "c0c87a54-475a-48ca-96cc-988f06008d07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1935.747711] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance a3c7943e-7528-41bc-9a20-1e2b57f832e3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1935.763795] env[62684]: DEBUG nova.virt.hardware [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1935.764146] env[62684]: DEBUG nova.virt.hardware [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1935.764568] env[62684]: DEBUG nova.virt.hardware [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1935.764830] env[62684]: DEBUG nova.virt.hardware [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1935.765372] env[62684]: DEBUG nova.virt.hardware [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1935.765587] env[62684]: DEBUG nova.virt.hardware [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1935.765842] env[62684]: DEBUG nova.virt.hardware [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1935.766310] env[62684]: DEBUG nova.virt.hardware [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1935.766521] env[62684]: DEBUG nova.virt.hardware [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1935.766731] env[62684]: DEBUG nova.virt.hardware [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1935.767169] env[62684]: DEBUG nova.virt.hardware [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1935.769017] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29c19953-4ca8-490f-8333-fe2f3f6cee75 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.779478] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c420b8e7-ac79-461c-931e-38ca3a52e606 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.077925] env[62684]: DEBUG oslo_vmware.api [None req-25e26b34-b1e1-40f9-bee1-379cbc994905 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2052882, 'name': PowerOnVM_Task, 'duration_secs': 0.556645} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.077925] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-25e26b34-b1e1-40f9-bee1-379cbc994905 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1936.077925] env[62684]: DEBUG nova.compute.manager [None req-25e26b34-b1e1-40f9-bee1-379cbc994905 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1936.078854] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-579f032a-d0e4-44c5-8c91-4ff0349d4409 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.167378] env[62684]: DEBUG nova.network.neutron [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1936.243763] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b0c2ce32-d48f-4ad5-a3f9-33213438696b tempest-ServersAdminTestJSON-1140288753 tempest-ServersAdminTestJSON-1140288753-project-admin] Releasing lock "refresh_cache-e3dd1bc0-f292-4ac7-a8db-324887a18411" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1936.244221] env[62684]: DEBUG nova.compute.manager [None req-b0c2ce32-d48f-4ad5-a3f9-33213438696b tempest-ServersAdminTestJSON-1140288753 tempest-ServersAdminTestJSON-1140288753-project-admin] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Inject network info {{(pid=62684) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1936.244609] env[62684]: DEBUG nova.compute.manager [None req-b0c2ce32-d48f-4ad5-a3f9-33213438696b tempest-ServersAdminTestJSON-1140288753 tempest-ServersAdminTestJSON-1140288753-project-admin] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] network_info to inject: |[{"id": "c0c87a54-475a-48ca-96cc-988f06008d07", "address": "fa:16:3e:63:f9:9d", "network": {"id": "bca0ee43-bbb1-483b-9d82-56955369f9b7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1592250106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aef5d7061c834332b9f9c5c75596bf08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bfa7abe-7e46-4d8f-b50a-4d0c4509e4dc", "external-id": "nsx-vlan-transportzone-951", "segmentation_id": 951, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0c87a54-47", "ovs_interfaceid": "c0c87a54-475a-48ca-96cc-988f06008d07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7221}} [ 1936.251085] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b0c2ce32-d48f-4ad5-a3f9-33213438696b tempest-ServersAdminTestJSON-1140288753 tempest-ServersAdminTestJSON-1140288753-project-admin] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Reconfiguring VM instance to set the machine id {{(pid=62684) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1936.254497] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0a0eac19-73a7-406b-a8d9-1dd4aaff4cb8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.267928] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 31419285-9fdf-4d37-94d7-d1b08c6b6b05 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1936.280869] env[62684]: DEBUG oslo_vmware.api [None req-b0c2ce32-d48f-4ad5-a3f9-33213438696b tempest-ServersAdminTestJSON-1140288753 tempest-ServersAdminTestJSON-1140288753-project-admin] Waiting for the task: (returnval){ [ 1936.280869] env[62684]: value = "task-2052883" [ 1936.280869] env[62684]: _type = "Task" [ 1936.280869] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.298237] env[62684]: DEBUG oslo_vmware.api [None req-b0c2ce32-d48f-4ad5-a3f9-33213438696b tempest-ServersAdminTestJSON-1140288753 tempest-ServersAdminTestJSON-1140288753-project-admin] Task: {'id': task-2052883, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.434502] env[62684]: DEBUG nova.network.neutron [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Updating instance_info_cache with network_info: [{"id": "cd978df3-9bd9-4010-88e9-d5a4127cf2a9", "address": "fa:16:3e:b8:e7:a7", "network": {"id": "7982dcb9-e661-4690-9931-bf412f4a564e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1278071472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72ac36eda47d4c51a4b421c764d0404d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9bb629cd-6d0f-4bed-965c-bd04a2f3ec49", "external-id": "nsx-vlan-transportzone-848", "segmentation_id": 848, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd978df3-9b", "ovs_interfaceid": "cd978df3-9bd9-4010-88e9-d5a4127cf2a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1936.654234] env[62684]: DEBUG nova.network.neutron [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Successfully updated port: 56087696-2064-4dae-a727-8e8e32b7bb65 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1936.775953] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 18a97088-fffa-4b77-8ab0-d24f6f84f516 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1936.779229] env[62684]: DEBUG nova.compute.manager [req-82d90110-fd2c-45be-ac65-68e9dafaf505 req-016e4506-b145-41e6-aa74-c3c129e4f398 service nova] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Received event network-vif-plugged-56087696-2064-4dae-a727-8e8e32b7bb65 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1936.779522] env[62684]: DEBUG oslo_concurrency.lockutils [req-82d90110-fd2c-45be-ac65-68e9dafaf505 req-016e4506-b145-41e6-aa74-c3c129e4f398 service nova] Acquiring lock "b009f710-1a94-4113-8feb-7cc5dd6a6519-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1936.780046] env[62684]: DEBUG oslo_concurrency.lockutils [req-82d90110-fd2c-45be-ac65-68e9dafaf505 req-016e4506-b145-41e6-aa74-c3c129e4f398 service nova] Lock "b009f710-1a94-4113-8feb-7cc5dd6a6519-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1936.780265] env[62684]: DEBUG oslo_concurrency.lockutils [req-82d90110-fd2c-45be-ac65-68e9dafaf505 req-016e4506-b145-41e6-aa74-c3c129e4f398 service nova] Lock "b009f710-1a94-4113-8feb-7cc5dd6a6519-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1936.780704] env[62684]: DEBUG nova.compute.manager [req-82d90110-fd2c-45be-ac65-68e9dafaf505 req-016e4506-b145-41e6-aa74-c3c129e4f398 service nova] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] No waiting events found dispatching network-vif-plugged-56087696-2064-4dae-a727-8e8e32b7bb65 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1936.780704] env[62684]: WARNING nova.compute.manager [req-82d90110-fd2c-45be-ac65-68e9dafaf505 req-016e4506-b145-41e6-aa74-c3c129e4f398 service nova] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Received unexpected event network-vif-plugged-56087696-2064-4dae-a727-8e8e32b7bb65 for instance with vm_state building and task_state spawning. [ 1936.791128] env[62684]: DEBUG oslo_vmware.api [None req-b0c2ce32-d48f-4ad5-a3f9-33213438696b tempest-ServersAdminTestJSON-1140288753 tempest-ServersAdminTestJSON-1140288753-project-admin] Task: {'id': task-2052883, 'name': ReconfigVM_Task, 'duration_secs': 0.198637} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.791419] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b0c2ce32-d48f-4ad5-a3f9-33213438696b tempest-ServersAdminTestJSON-1140288753 tempest-ServersAdminTestJSON-1140288753-project-admin] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Reconfigured VM instance to set the machine id {{(pid=62684) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1936.940734] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Releasing lock "refresh_cache-feca8680-4baa-4b2c-9875-69a88b351dc0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1936.940734] env[62684]: DEBUG nova.compute.manager [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Instance network_info: |[{"id": "cd978df3-9bd9-4010-88e9-d5a4127cf2a9", "address": "fa:16:3e:b8:e7:a7", "network": {"id": "7982dcb9-e661-4690-9931-bf412f4a564e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1278071472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72ac36eda47d4c51a4b421c764d0404d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9bb629cd-6d0f-4bed-965c-bd04a2f3ec49", "external-id": "nsx-vlan-transportzone-848", "segmentation_id": 848, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd978df3-9b", "ovs_interfaceid": "cd978df3-9bd9-4010-88e9-d5a4127cf2a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1936.940734] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:e7:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9bb629cd-6d0f-4bed-965c-bd04a2f3ec49', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cd978df3-9bd9-4010-88e9-d5a4127cf2a9', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1936.956914] env[62684]: DEBUG oslo.service.loopingcall [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1936.956914] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1936.956914] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b449f2c2-1d94-4b0c-a58c-17aa4448d153 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.991769] env[62684]: DEBUG nova.compute.manager [req-1e2746b1-eec2-4463-984e-1bf2f6e05c57 req-a33c43c0-0b6c-4e34-9ac9-a8622ee4cf5f service nova] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Received event network-changed-cd978df3-9bd9-4010-88e9-d5a4127cf2a9 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1936.991769] env[62684]: DEBUG nova.compute.manager [req-1e2746b1-eec2-4463-984e-1bf2f6e05c57 req-a33c43c0-0b6c-4e34-9ac9-a8622ee4cf5f service nova] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Refreshing instance network info cache due to event network-changed-cd978df3-9bd9-4010-88e9-d5a4127cf2a9. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1936.991769] env[62684]: DEBUG oslo_concurrency.lockutils [req-1e2746b1-eec2-4463-984e-1bf2f6e05c57 req-a33c43c0-0b6c-4e34-9ac9-a8622ee4cf5f service nova] Acquiring lock "refresh_cache-feca8680-4baa-4b2c-9875-69a88b351dc0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1936.991769] env[62684]: DEBUG oslo_concurrency.lockutils [req-1e2746b1-eec2-4463-984e-1bf2f6e05c57 req-a33c43c0-0b6c-4e34-9ac9-a8622ee4cf5f service nova] Acquired lock "refresh_cache-feca8680-4baa-4b2c-9875-69a88b351dc0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1936.991769] env[62684]: DEBUG nova.network.neutron [req-1e2746b1-eec2-4463-984e-1bf2f6e05c57 req-a33c43c0-0b6c-4e34-9ac9-a8622ee4cf5f service nova] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Refreshing network info cache for port cd978df3-9bd9-4010-88e9-d5a4127cf2a9 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1936.997382] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1936.997382] env[62684]: value = "task-2052884" [ 1936.997382] env[62684]: _type = "Task" [ 1936.997382] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1937.006927] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052884, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.045543] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "57537508-06e7-43a4-95c5-c4399b8bf93f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1937.045966] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "57537508-06e7-43a4-95c5-c4399b8bf93f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1937.160069] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "refresh_cache-b009f710-1a94-4113-8feb-7cc5dd6a6519" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1937.160237] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquired lock "refresh_cache-b009f710-1a94-4113-8feb-7cc5dd6a6519" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1937.160390] env[62684]: DEBUG nova.network.neutron [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1937.283065] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance a56a3fab-e491-44f5-9cf4-2c308138ffc4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1937.517188] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052884, 'name': CreateVM_Task, 'duration_secs': 0.41822} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1937.517520] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1937.519046] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1937.519046] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1937.519660] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1937.519993] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7208c88a-b791-4d9f-9793-4e8888b4e3ee {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.526765] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Waiting for the task: (returnval){ [ 1937.526765] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522f4826-6206-b078-f25e-ca66cd64bb69" [ 1937.526765] env[62684]: _type = "Task" [ 1937.526765] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1937.536694] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522f4826-6206-b078-f25e-ca66cd64bb69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.551313] env[62684]: DEBUG nova.compute.manager [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1937.763819] env[62684]: DEBUG nova.network.neutron [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1937.771972] env[62684]: DEBUG nova.network.neutron [req-1e2746b1-eec2-4463-984e-1bf2f6e05c57 req-a33c43c0-0b6c-4e34-9ac9-a8622ee4cf5f service nova] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Updated VIF entry in instance network info cache for port cd978df3-9bd9-4010-88e9-d5a4127cf2a9. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1937.772388] env[62684]: DEBUG nova.network.neutron [req-1e2746b1-eec2-4463-984e-1bf2f6e05c57 req-a33c43c0-0b6c-4e34-9ac9-a8622ee4cf5f service nova] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Updating instance_info_cache with network_info: [{"id": "cd978df3-9bd9-4010-88e9-d5a4127cf2a9", "address": "fa:16:3e:b8:e7:a7", "network": {"id": "7982dcb9-e661-4690-9931-bf412f4a564e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1278071472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72ac36eda47d4c51a4b421c764d0404d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9bb629cd-6d0f-4bed-965c-bd04a2f3ec49", "external-id": "nsx-vlan-transportzone-848", "segmentation_id": 848, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd978df3-9b", "ovs_interfaceid": "cd978df3-9bd9-4010-88e9-d5a4127cf2a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1937.785820] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance dab11b88-ac23-43f0-9203-024faf41e1f5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1937.786103] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Migration 079cb72a-143d-49f7-91bd-12a1ad5c9e3e is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1937.786302] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 02dc8c41-5092-4f84-9722-37d4df3a459a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1938.037923] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522f4826-6206-b078-f25e-ca66cd64bb69, 'name': SearchDatastore_Task, 'duration_secs': 0.012389} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1938.038558] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1938.038868] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1938.039134] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1938.039293] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1938.039482] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1938.039770] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a9ccfd6-c1d9-4c91-9a71-3f91d69bcbc3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.050444] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1938.050665] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 
tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1938.051510] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0db2558d-3f80-4cfe-a0c8-863ca0565967 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.061183] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Waiting for the task: (returnval){ [ 1938.061183] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52220d34-853a-16f4-22c8-411f05e89e4a" [ 1938.061183] env[62684]: _type = "Task" [ 1938.061183] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1938.062145] env[62684]: DEBUG nova.network.neutron [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Updating instance_info_cache with network_info: [{"id": "56087696-2064-4dae-a727-8e8e32b7bb65", "address": "fa:16:3e:1a:e1:25", "network": {"id": "95ad5a29-9716-4b0c-937b-33a498c74ef7", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-342262548-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61c83953d09c4d1c97eee5a8679c30d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56087696-20", "ovs_interfaceid": "56087696-2064-4dae-a727-8e8e32b7bb65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1938.083022] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52220d34-853a-16f4-22c8-411f05e89e4a, 'name': SearchDatastore_Task, 'duration_secs': 0.0201} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1938.086508] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6cbfebcc-d1ea-4cf0-bd05-87af7eb477ae {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.093537] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Waiting for the task: (returnval){ [ 1938.093537] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527944cc-7aef-c619-04ec-2383d001e1fd" [ 1938.093537] env[62684]: _type = "Task" [ 1938.093537] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1938.103056] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527944cc-7aef-c619-04ec-2383d001e1fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.103727] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1938.275018] env[62684]: DEBUG oslo_concurrency.lockutils [req-1e2746b1-eec2-4463-984e-1bf2f6e05c57 req-a33c43c0-0b6c-4e34-9ac9-a8622ee4cf5f service nova] Releasing lock "refresh_cache-feca8680-4baa-4b2c-9875-69a88b351dc0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1938.292020] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1938.292020] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Instance with task_state "unshelving" is not being actively managed by this compute host but has allocations referencing this compute node (c23c281e-ec1f-4876-972e-a98655f2084f): {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocations during the task state transition. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1780}} [ 1938.292020] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1938.292020] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3840MB phys_disk=200GB used_disk=18GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1938.565532] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Releasing lock "refresh_cache-b009f710-1a94-4113-8feb-7cc5dd6a6519" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1938.566176] env[62684]: DEBUG nova.compute.manager [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Instance network_info: |[{"id": "56087696-2064-4dae-a727-8e8e32b7bb65", "address": "fa:16:3e:1a:e1:25", "network": {"id": "95ad5a29-9716-4b0c-937b-33a498c74ef7", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-342262548-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61c83953d09c4d1c97eee5a8679c30d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56087696-20", "ovs_interfaceid": "56087696-2064-4dae-a727-8e8e32b7bb65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1938.566944] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:e1:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c68b7663-4f0e-47f0-ac7f-40c6d952f7bb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '56087696-2064-4dae-a727-8e8e32b7bb65', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1938.575841] env[62684]: DEBUG oslo.service.loopingcall [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 
tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1938.576290] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1938.576683] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-811cf53a-799d-46e2-84ee-7384024807de {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.608863] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527944cc-7aef-c619-04ec-2383d001e1fd, 'name': SearchDatastore_Task, 'duration_secs': 0.018818} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1938.612652] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1938.612951] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] feca8680-4baa-4b2c-9875-69a88b351dc0/feca8680-4baa-4b2c-9875-69a88b351dc0.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1938.613228] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1938.613228] env[62684]: value = "task-2052885" [ 1938.613228] env[62684]: _type = "Task" [ 1938.613228] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1938.613610] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-52b72a2b-25d6-41ea-ac57-ed86b67dcb0e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.625120] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052885, 'name': CreateVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.626412] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Waiting for the task: (returnval){ [ 1938.626412] env[62684]: value = "task-2052886" [ 1938.626412] env[62684]: _type = "Task" [ 1938.626412] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1938.636692] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052886, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.900089] env[62684]: DEBUG nova.compute.manager [req-fc7b6114-580e-45bd-aaa0-860f13ca3c60 req-941bdca0-cfdc-42f7-8020-31ae99a0cd16 service nova] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Received event network-changed-56087696-2064-4dae-a727-8e8e32b7bb65 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1938.900321] env[62684]: DEBUG nova.compute.manager [req-fc7b6114-580e-45bd-aaa0-860f13ca3c60 req-941bdca0-cfdc-42f7-8020-31ae99a0cd16 service nova] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Refreshing instance network info cache due to event network-changed-56087696-2064-4dae-a727-8e8e32b7bb65. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1938.900505] env[62684]: DEBUG oslo_concurrency.lockutils [req-fc7b6114-580e-45bd-aaa0-860f13ca3c60 req-941bdca0-cfdc-42f7-8020-31ae99a0cd16 service nova] Acquiring lock "refresh_cache-b009f710-1a94-4113-8feb-7cc5dd6a6519" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1938.900648] env[62684]: DEBUG oslo_concurrency.lockutils [req-fc7b6114-580e-45bd-aaa0-860f13ca3c60 req-941bdca0-cfdc-42f7-8020-31ae99a0cd16 service nova] Acquired lock "refresh_cache-b009f710-1a94-4113-8feb-7cc5dd6a6519" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1938.900805] env[62684]: DEBUG nova.network.neutron [req-fc7b6114-580e-45bd-aaa0-860f13ca3c60 req-941bdca0-cfdc-42f7-8020-31ae99a0cd16 service nova] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Refreshing network info cache for port 56087696-2064-4dae-a727-8e8e32b7bb65 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1938.994946] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-649a9a73-cc58-4e55-b36c-fe3f81a1c566 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.006535] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57956253-686c-4e24-abf3-3a1516699e06 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.051578] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19a0073a-0177-4917-8008-c7afb73383c2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.062017] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fe65240-78db-4c21-be6f-147915edfb62 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.079920] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: 
c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1939.128172] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052885, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.138032] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052886, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.571360] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "0156d807-1ab4-482f-91d1-172bf32bf23c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1939.571360] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "0156d807-1ab4-482f-91d1-172bf32bf23c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1939.582351] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1939.628269] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052885, 'name': CreateVM_Task, 'duration_secs': 0.704285} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1939.628269] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1939.628400] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1939.628524] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1939.628875] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1939.629229] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-878c73db-34d3-46c2-9f94-ad4ac03ce023 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.636940] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1939.636940] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52976c57-cd93-a0d6-fcf3-a3bd38382360" [ 1939.636940] env[62684]: _type = "Task" [ 1939.636940] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1939.642315] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052886, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.711682} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1939.645452] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] feca8680-4baa-4b2c-9875-69a88b351dc0/feca8680-4baa-4b2c-9875-69a88b351dc0.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1939.645737] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1939.646041] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-374c845e-0d34-420d-91ce-9922bf0cbd55 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.656093] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52976c57-cd93-a0d6-fcf3-a3bd38382360, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.657519] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Waiting for the task: (returnval){ [ 1939.657519] env[62684]: value = "task-2052887" [ 1939.657519] env[62684]: _type = "Task" [ 1939.657519] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1939.670690] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052887, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.744548] env[62684]: DEBUG nova.network.neutron [req-fc7b6114-580e-45bd-aaa0-860f13ca3c60 req-941bdca0-cfdc-42f7-8020-31ae99a0cd16 service nova] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Updated VIF entry in instance network info cache for port 56087696-2064-4dae-a727-8e8e32b7bb65. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1939.744933] env[62684]: DEBUG nova.network.neutron [req-fc7b6114-580e-45bd-aaa0-860f13ca3c60 req-941bdca0-cfdc-42f7-8020-31ae99a0cd16 service nova] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Updating instance_info_cache with network_info: [{"id": "56087696-2064-4dae-a727-8e8e32b7bb65", "address": "fa:16:3e:1a:e1:25", "network": {"id": "95ad5a29-9716-4b0c-937b-33a498c74ef7", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-342262548-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61c83953d09c4d1c97eee5a8679c30d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56087696-20", "ovs_interfaceid": "56087696-2064-4dae-a727-8e8e32b7bb65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1939.956791] env[62684]: INFO nova.compute.manager [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Rebuilding instance [ 1939.999597] env[62684]: DEBUG nova.compute.manager [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1940.000489] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4897ef66-4350-4603-b111-37336fa252dc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.087678] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1940.087957] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 6.418s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1940.088265] env[62684]: DEBUG oslo_concurrency.lockutils [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 50.523s {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1940.088472] env[62684]: DEBUG oslo_concurrency.lockutils [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1940.090880] env[62684]: DEBUG oslo_concurrency.lockutils [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 49.259s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1940.091089] env[62684]: DEBUG oslo_concurrency.lockutils [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1940.093135] env[62684]: DEBUG oslo_concurrency.lockutils [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 47.887s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1940.093344] env[62684]: DEBUG oslo_concurrency.lockutils [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1940.094889] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 45.921s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1940.096661] env[62684]: INFO nova.compute.claims [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1940.153723] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52976c57-cd93-a0d6-fcf3-a3bd38382360, 'name': SearchDatastore_Task, 'duration_secs': 0.018807} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1940.153723] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1940.153723] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1940.153723] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1940.153831] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1940.153987] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1940.154293] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-749e85d4-a4e6-47ad-8ad6-a819947cdce4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.160022] env[62684]: INFO nova.scheduler.client.report [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Deleted allocations for instance b945f05d-ef1c-4469-9390-f7bbd4f435f0 [ 1940.176937] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052887, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073034} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1940.178880] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1940.179255] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1940.179482] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1940.180904] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6309ab8-3dbe-402c-a0cb-795b38f037d7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.183284] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5eec493b-4390-4234-8f33-f484939d4dcc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.187415] env[62684]: INFO nova.scheduler.client.report [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Deleted allocations for instance 50bc9674-d19c-40f1-a89f-1738a1e48307 [ 1940.189628] env[62684]: INFO nova.scheduler.client.report [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Deleted allocations for instance 0dbd52ac-c987-4728-974e-73e99465c5e7 [ 1940.219999] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] feca8680-4baa-4b2c-9875-69a88b351dc0/feca8680-4baa-4b2c-9875-69a88b351dc0.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1940.220412] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1940.220412] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ba07af-c7a6-4106-2d04-43049380c634" [ 1940.220412] env[62684]: _type = "Task" [ 1940.220412] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.221337] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5051d3bc-ea53-4ffb-93d1-ced95b2edb43 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.253961] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ba07af-c7a6-4106-2d04-43049380c634, 'name': SearchDatastore_Task, 'duration_secs': 0.027835} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1940.253961] env[62684]: DEBUG oslo_concurrency.lockutils [req-fc7b6114-580e-45bd-aaa0-860f13ca3c60 req-941bdca0-cfdc-42f7-8020-31ae99a0cd16 service nova] Releasing lock "refresh_cache-b009f710-1a94-4113-8feb-7cc5dd6a6519" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1940.253961] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Waiting for the task: (returnval){ [ 1940.253961] env[62684]: value = "task-2052888" [ 1940.253961] env[62684]: _type = "Task" [ 1940.253961] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.253961] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34ca5bb5-ee72-482e-bf9f-d15c3ae80b36 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.261467] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1940.261467] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523ade89-d75d-c104-912b-b98cb5809fb2" [ 1940.261467] env[62684]: _type = "Task" [ 1940.261467] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.266802] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052888, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.275024] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523ade89-d75d-c104-912b-b98cb5809fb2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.513165] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1940.514067] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-09cb6bd3-79e4-43d4-ad86-13b916dfa98b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.524021] env[62684]: DEBUG oslo_vmware.api [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1940.524021] env[62684]: value = "task-2052889" [ 1940.524021] env[62684]: _type = "Task" [ 1940.524021] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.537853] env[62684]: DEBUG oslo_vmware.api [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052889, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.676764] env[62684]: DEBUG oslo_concurrency.lockutils [None req-29bb95d8-08e7-4b34-8dfc-140f378a8598 tempest-ServersNegativeTestMultiTenantJSON-1096233618 tempest-ServersNegativeTestMultiTenantJSON-1096233618-project-member] Lock "b945f05d-ef1c-4469-9390-f7bbd4f435f0" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 54.306s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1940.699601] env[62684]: DEBUG oslo_concurrency.lockutils [None req-18874118-bb2d-4984-9f2a-fca1202c8b3b tempest-AttachInterfacesUnderV243Test-489768781 tempest-AttachInterfacesUnderV243Test-489768781-project-member] Lock "0dbd52ac-c987-4728-974e-73e99465c5e7" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 53.274s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1940.712201] env[62684]: DEBUG oslo_concurrency.lockutils [None req-854730a8-26bf-44f5-b213-04662d09f2f8 tempest-ServerMetadataNegativeTestJSON-568110445 tempest-ServerMetadataNegativeTestJSON-568110445-project-member] Lock "50bc9674-d19c-40f1-a89f-1738a1e48307" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 51.475s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1940.765850] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052888, 'name': ReconfigVM_Task, 'duration_secs': 0.284003} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1940.769683] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Reconfigured VM instance instance-00000034 to attach disk [datastore2] feca8680-4baa-4b2c-9875-69a88b351dc0/feca8680-4baa-4b2c-9875-69a88b351dc0.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1940.770049] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=62684) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 1940.771028] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-04a033ed-9796-46bc-82e0-c7b12ef1c6b7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.780444] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523ade89-d75d-c104-912b-b98cb5809fb2, 'name': SearchDatastore_Task, 'duration_secs': 0.017845} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1940.781577] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1940.781894] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] b009f710-1a94-4113-8feb-7cc5dd6a6519/b009f710-1a94-4113-8feb-7cc5dd6a6519.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1940.782318] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Waiting for the task: (returnval){ [ 1940.782318] env[62684]: value = "task-2052890" [ 1940.782318] env[62684]: _type = "Task" [ 1940.782318] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.782579] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0880bca6-0f95-40cd-8c8d-cdfdffd00edc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.795484] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052890, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.797429] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1940.797429] env[62684]: value = "task-2052891" [ 1940.797429] env[62684]: _type = "Task" [ 1940.797429] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.806328] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052891, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.860739] env[62684]: DEBUG oslo_concurrency.lockutils [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "2baabe7a-ed33-4cef-9acc-a7b804610b0a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1940.860993] env[62684]: DEBUG oslo_concurrency.lockutils [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "2baabe7a-ed33-4cef-9acc-a7b804610b0a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1941.035056] env[62684]: DEBUG oslo_vmware.api [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052889, 'name': PowerOffVM_Task, 'duration_secs': 0.386703} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.035056] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1941.035056] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1941.035932] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f05c010-0e39-4ebe-b28a-74d29b0e7404 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.043416] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1941.043735] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-70346119-d359-4201-906d-33e6fa4b8a54 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.194470] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1941.194470] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1941.194470] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Deleting the datastore file [datastore1] dcb0a5b2-379e-44ff-a9b0-be615943c94e {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1941.194470] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7d0a5710-13cc-4ae0-be84-107b5bd735c2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.203600] env[62684]: DEBUG oslo_vmware.api [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1941.203600] env[62684]: value = "task-2052893" [ 1941.203600] env[62684]: _type = "Task" [ 1941.203600] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.213409] env[62684]: DEBUG oslo_vmware.api [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052893, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.296297] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052890, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.051445} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.299177] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=62684) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 1941.300458] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caec7ab2-6437-46de-bdf7-82f0ce4e5043 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.312769] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052891, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.339782] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] feca8680-4baa-4b2c-9875-69a88b351dc0/ephemeral_0.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1941.340525] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dca586fa-30c9-4506-b578-36573c2bb0d1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.361245] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Waiting for the task: (returnval){ [ 1941.361245] env[62684]: value = "task-2052894" [ 1941.361245] env[62684]: _type = "Task" [ 1941.361245] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.370797] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052894, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.639419] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-845b8e08-d989-41a2-8dd8-73531f2165ab {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.647206] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0409060e-7e09-47d5-8c90-724fe5485ac0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.678525] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db51e3ae-af34-45d9-9731-5fba9ed9aea4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.685880] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a41d0da9-6b8d-42e8-b90b-a91f283613a5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.700478] env[62684]: DEBUG nova.compute.provider_tree [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1941.712846] env[62684]: DEBUG oslo_vmware.api [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052893, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.226943} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.713125] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1941.713321] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1941.713500] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1941.809063] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052891, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.543701} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.809362] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] b009f710-1a94-4113-8feb-7cc5dd6a6519/b009f710-1a94-4113-8feb-7cc5dd6a6519.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1941.809600] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1941.809863] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-def15f6d-da90-4f7c-a563-7e78e2a34ea9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.818255] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1941.818255] env[62684]: value = "task-2052895" [ 1941.818255] env[62684]: _type = "Task" [ 1941.818255] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.827039] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052895, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.871346] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052894, 'name': ReconfigVM_Task, 'duration_secs': 0.309863} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.871570] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Reconfigured VM instance instance-00000034 to attach disk [datastore2] feca8680-4baa-4b2c-9875-69a88b351dc0/ephemeral_0.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1941.872322] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3ebdb09e-e816-4737-a609-32a87503e842 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.878208] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Waiting for the task: (returnval){ [ 1941.878208] env[62684]: value = "task-2052896" [ 1941.878208] env[62684]: _type = "Task" [ 1941.878208] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.890229] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052896, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.226427] env[62684]: ERROR nova.scheduler.client.report [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [req-0f8fe9a5-6d4d-47f8-a5d7-418d52d45563] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0f8fe9a5-6d4d-47f8-a5d7-418d52d45563"}]} [ 1942.250312] env[62684]: DEBUG nova.scheduler.client.report [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1942.271814] env[62684]: DEBUG nova.scheduler.client.report [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1942.271814] env[62684]: DEBUG nova.compute.provider_tree [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1942.283149] env[62684]: DEBUG nova.scheduler.client.report [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1942.306432] env[62684]: DEBUG nova.scheduler.client.report [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1942.328156] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052895, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064881} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1942.332257] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1942.332257] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72e09e58-1d62-447d-9c2c-096bc7d42f13 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.356514] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Reconfiguring VM instance instance-00000035 to attach disk [datastore2] b009f710-1a94-4113-8feb-7cc5dd6a6519/b009f710-1a94-4113-8feb-7cc5dd6a6519.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1942.359662] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7b0a345-b83e-43b9-be15-eff4ecba72a0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.380231] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1942.380231] env[62684]: value = "task-2052897" [ 1942.380231] env[62684]: _type = "Task" [ 1942.380231] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1942.391628] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052896, 'name': Rename_Task, 'duration_secs': 0.203697} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1942.397295] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1942.397821] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052897, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.398430] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ce9846c0-490e-4b99-9cef-64349f472efb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.406454] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Waiting for the task: (returnval){ [ 1942.406454] env[62684]: value = "task-2052898" [ 1942.406454] env[62684]: _type = "Task" [ 1942.406454] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1942.421082] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052898, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.761733] env[62684]: DEBUG nova.virt.hardware [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1942.762171] env[62684]: DEBUG nova.virt.hardware [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1942.762508] env[62684]: DEBUG nova.virt.hardware [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1942.762821] env[62684]: DEBUG nova.virt.hardware [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1942.763160] env[62684]: DEBUG nova.virt.hardware [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1942.763416] env[62684]: DEBUG 
nova.virt.hardware [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1942.763810] env[62684]: DEBUG nova.virt.hardware [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1942.764086] env[62684]: DEBUG nova.virt.hardware [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1942.764436] env[62684]: DEBUG nova.virt.hardware [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1942.764708] env[62684]: DEBUG nova.virt.hardware [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1942.765082] env[62684]: DEBUG nova.virt.hardware [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1942.766242] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a49a62-8f3c-4d68-92ac-f4742fb4d71b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.780031] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61a6378e-3744-4398-b555-223fd4cddd86 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.798821] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:51:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bfa7abe-7e46-4d8f-b50a-4d0c4509e4dc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '617e9c54-b56e-4945-b890-de6be33b657b', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1942.808082] env[62684]: DEBUG oslo.service.loopingcall [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1942.811434] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1942.812634] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b1d770b3-4a30-4167-9f4b-2fe557b7e131 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.835335] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1942.835335] env[62684]: value = "task-2052899" [ 1942.835335] env[62684]: _type = "Task" [ 1942.835335] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1942.844786] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052899, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.893566] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052897, 'name': ReconfigVM_Task, 'duration_secs': 0.485223} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1942.893937] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Reconfigured VM instance instance-00000035 to attach disk [datastore2] b009f710-1a94-4113-8feb-7cc5dd6a6519/b009f710-1a94-4113-8feb-7cc5dd6a6519.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1942.894582] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a51d80bd-b2cf-4e95-a41e-ed7bfa97cf56 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.901761] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1942.901761] env[62684]: value = "task-2052900" [ 1942.901761] env[62684]: _type = "Task" [ 1942.901761] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1942.903859] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dad1617-d695-40df-ab61-b40bb57b1071 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.924228] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6577212-358a-42c7-ac66-833e6a11fb0d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.928729] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052900, 'name': Rename_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.932558] env[62684]: DEBUG oslo_vmware.api [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2052898, 'name': PowerOnVM_Task, 'duration_secs': 0.49723} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1942.933229] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1942.933490] env[62684]: INFO nova.compute.manager [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Took 9.38 seconds to spawn the instance on the hypervisor. 
[ 1942.933681] env[62684]: DEBUG nova.compute.manager [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1942.934608] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4993b8fd-3206-4b85-a3c3-9a50d8731ace {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.964575] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b346d6ac-75fd-4ed5-9698-26c6011c1aa1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.983865] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db03cd5b-9f35-4966-9c25-7f4d4b348b46 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.006745] env[62684]: DEBUG nova.compute.provider_tree [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1943.348757] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052899, 'name': CreateVM_Task, 'duration_secs': 0.349509} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.351603] env[62684]: DEBUG oslo_vmware.rw_handles [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524b79ae-d98b-d000-d6b1-43e7b66adc6b/disk-0.vmdk. 
{{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1943.351603] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1943.352063] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f45cd7ae-f790-455a-9332-d4e922f053c0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.356357] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1943.356629] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1943.356863] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1943.357154] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-316fce5c-1b0e-47e5-b20c-d181d1a02420 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.362917] env[62684]: DEBUG oslo_vmware.rw_handles [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524b79ae-d98b-d000-d6b1-43e7b66adc6b/disk-0.vmdk is in state: ready. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1943.362992] env[62684]: ERROR oslo_vmware.rw_handles [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524b79ae-d98b-d000-d6b1-43e7b66adc6b/disk-0.vmdk due to incomplete transfer. [ 1943.364839] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-25eb18cf-cd02-4e80-bbf9-2acec514cbc0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.366923] env[62684]: DEBUG oslo_vmware.api [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1943.366923] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522daff8-ff10-cb7d-89f1-e677bb106a62" [ 1943.366923] env[62684]: _type = "Task" [ 1943.366923] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.372274] env[62684]: DEBUG oslo_vmware.rw_handles [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524b79ae-d98b-d000-d6b1-43e7b66adc6b/disk-0.vmdk. {{(pid=62684) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1943.372489] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Uploaded image 0a70b1eb-e88e-4218-abd5-edf1113ecf05 to the Glance image server {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1943.375376] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Destroying the VM {{(pid=62684) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1943.380104] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-524e6100-7b2f-4c1a-8599-007804d30c1d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.381807] env[62684]: DEBUG oslo_vmware.api [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522daff8-ff10-cb7d-89f1-e677bb106a62, 'name': SearchDatastore_Task, 'duration_secs': 0.009574} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.382098] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1943.382338] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1943.382572] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1943.382717] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1943.382893] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1943.385134] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-60d75d1b-0512-41ce-ac99-287b2d0e49a6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.390433] env[62684]: DEBUG oslo_vmware.api [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1943.390433] env[62684]: value = "task-2052901" [ 1943.390433] env[62684]: _type = "Task" [ 1943.390433] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.395991] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1943.397209] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1943.398290] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-546e76da-4e7c-4437-ab7d-81a5b3a4d56a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.407114] env[62684]: DEBUG oslo_vmware.api [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052901, 'name': Destroy_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.414614] env[62684]: DEBUG oslo_vmware.api [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1943.414614] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523a1c2f-3663-23ce-7225-ffe5a9465b50" [ 1943.414614] env[62684]: _type = "Task" [ 1943.414614] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.421799] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052900, 'name': Rename_Task, 'duration_secs': 0.175771} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.423994] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1943.423994] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-706b534f-2853-43f0-9014-bbd8f3e33044 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.429050] env[62684]: DEBUG oslo_vmware.api [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523a1c2f-3663-23ce-7225-ffe5a9465b50, 'name': SearchDatastore_Task, 'duration_secs': 0.009025} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.430801] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fea8ebe1-b3ac-4e1b-9e0f-b62e855f5640 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.436163] env[62684]: DEBUG oslo_vmware.api [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1943.436163] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a7329a-66fe-8531-0279-bd8560153255" [ 1943.436163] env[62684]: _type = "Task" [ 1943.436163] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.437827] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1943.437827] env[62684]: value = "task-2052902" [ 1943.437827] env[62684]: _type = "Task" [ 1943.437827] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.450591] env[62684]: DEBUG oslo_vmware.api [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a7329a-66fe-8531-0279-bd8560153255, 'name': SearchDatastore_Task, 'duration_secs': 0.009749} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.453714] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1943.453958] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] dcb0a5b2-379e-44ff-a9b0-be615943c94e/dcb0a5b2-379e-44ff-a9b0-be615943c94e.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1943.454271] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052902, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.454507] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e38bc8e2-b607-4d17-ad44-32cdab6ea1a1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.461548] env[62684]: DEBUG oslo_vmware.api [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1943.461548] env[62684]: value = "task-2052903" [ 1943.461548] env[62684]: _type = "Task" [ 1943.461548] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.469615] env[62684]: DEBUG oslo_vmware.api [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052903, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.493484] env[62684]: INFO nova.compute.manager [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Took 57.04 seconds to build instance. [ 1943.547058] env[62684]: DEBUG nova.scheduler.client.report [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 86 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1943.547519] env[62684]: DEBUG nova.compute.provider_tree [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 86 to 87 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1943.547819] env[62684]: DEBUG nova.compute.provider_tree [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1943.905058] env[62684]: DEBUG oslo_vmware.api [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052901, 'name': Destroy_Task, 'duration_secs': 0.490789} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.905058] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Destroyed the VM [ 1943.905058] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Deleting Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1943.905058] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-dbebe678-571b-44a8-b2cc-4751bec488b4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.911880] env[62684]: DEBUG oslo_vmware.api [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1943.911880] env[62684]: value = "task-2052904" [ 1943.911880] env[62684]: _type = "Task" [ 1943.911880] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.927890] env[62684]: DEBUG oslo_vmware.api [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052904, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.954402] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052902, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.972320] env[62684]: DEBUG oslo_vmware.api [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052903, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.492013} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.972696] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] dcb0a5b2-379e-44ff-a9b0-be615943c94e/dcb0a5b2-379e-44ff-a9b0-be615943c94e.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1943.972979] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1943.973338] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-110e0dd4-9956-4d19-bcd0-bbbc6c3f9420 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.987489] env[62684]: DEBUG oslo_vmware.api [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1943.987489] env[62684]: value = "task-2052905" [ 1943.987489] env[62684]: _type = "Task" [ 1943.987489] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.996591] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dabfa1ea-4052-4c0f-962c-33bb1b87f6f5 tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Lock "feca8680-4baa-4b2c-9875-69a88b351dc0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.835s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1943.996935] env[62684]: DEBUG oslo_vmware.api [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052905, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.054101] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.959s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1944.054398] env[62684]: DEBUG nova.compute.manager [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1944.061755] env[62684]: DEBUG oslo_concurrency.lockutils [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 48.376s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1944.063247] env[62684]: INFO nova.compute.claims [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1944.423484] env[62684]: DEBUG oslo_vmware.api [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052904, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.452860] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052902, 'name': PowerOnVM_Task, 'duration_secs': 0.572208} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.453185] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1944.453440] env[62684]: INFO nova.compute.manager [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Took 8.74 seconds to spawn the instance on the hypervisor. [ 1944.453681] env[62684]: DEBUG nova.compute.manager [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1944.454499] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfcc5d84-8132-42d0-b2c3-d9b73ab043eb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.495442] env[62684]: DEBUG oslo_vmware.api [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052905, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096154} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.495794] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1944.496854] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-406b9b16-abd6-47ce-a9c9-5374b12cae30 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.500365] env[62684]: DEBUG nova.compute.manager [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1944.525694] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] dcb0a5b2-379e-44ff-a9b0-be615943c94e/dcb0a5b2-379e-44ff-a9b0-be615943c94e.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1944.530213] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f1cee62-3b1c-42b9-ab4b-2e6e6d2ad3f6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.549241] env[62684]: DEBUG oslo_vmware.api [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1944.549241] env[62684]: value = "task-2052906" [ 1944.549241] env[62684]: _type = "Task" [ 1944.549241] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.558850] env[62684]: DEBUG oslo_vmware.api [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052906, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.560214] env[62684]: DEBUG nova.compute.utils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1944.562097] env[62684]: DEBUG nova.compute.manager [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1944.562307] env[62684]: DEBUG nova.network.neutron [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1944.686701] env[62684]: DEBUG nova.policy [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5892ca114a9d4a6e95c0498c2fc7f2ba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61c83953d09c4d1c97eee5a8679c30d4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1944.927147] env[62684]: DEBUG oslo_vmware.api [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052904, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.972026] env[62684]: INFO nova.compute.manager [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Took 56.65 seconds to build instance. [ 1945.047277] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1945.060248] env[62684]: DEBUG oslo_vmware.api [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052906, 'name': ReconfigVM_Task, 'duration_secs': 0.289594} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1945.060544] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Reconfigured VM instance instance-0000000f to attach disk [datastore1] dcb0a5b2-379e-44ff-a9b0-be615943c94e/dcb0a5b2-379e-44ff-a9b0-be615943c94e.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1945.061227] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e2fd032e-e8d7-4c91-a9c1-91d32021ab7e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.063141] env[62684]: DEBUG nova.compute.manager [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1945.074956] env[62684]: DEBUG oslo_vmware.api [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1945.074956] env[62684]: value = "task-2052907" [ 1945.074956] env[62684]: _type = "Task" [ 1945.074956] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1945.085903] env[62684]: DEBUG oslo_vmware.api [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052907, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.292220] env[62684]: DEBUG nova.compute.manager [req-6d44efd8-6f98-4c74-b85d-60f924ee9e7e req-8dbaa82f-88e4-4907-a70c-90e741f5a778 service nova] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Received event network-changed-cd978df3-9bd9-4010-88e9-d5a4127cf2a9 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1945.292220] env[62684]: DEBUG nova.compute.manager [req-6d44efd8-6f98-4c74-b85d-60f924ee9e7e req-8dbaa82f-88e4-4907-a70c-90e741f5a778 service nova] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Refreshing instance network info cache due to event network-changed-cd978df3-9bd9-4010-88e9-d5a4127cf2a9. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1945.292220] env[62684]: DEBUG oslo_concurrency.lockutils [req-6d44efd8-6f98-4c74-b85d-60f924ee9e7e req-8dbaa82f-88e4-4907-a70c-90e741f5a778 service nova] Acquiring lock "refresh_cache-feca8680-4baa-4b2c-9875-69a88b351dc0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1945.292220] env[62684]: DEBUG oslo_concurrency.lockutils [req-6d44efd8-6f98-4c74-b85d-60f924ee9e7e req-8dbaa82f-88e4-4907-a70c-90e741f5a778 service nova] Acquired lock "refresh_cache-feca8680-4baa-4b2c-9875-69a88b351dc0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1945.292220] env[62684]: DEBUG nova.network.neutron [req-6d44efd8-6f98-4c74-b85d-60f924ee9e7e req-8dbaa82f-88e4-4907-a70c-90e741f5a778 service nova] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Refreshing network info cache for port cd978df3-9bd9-4010-88e9-d5a4127cf2a9 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1945.397737] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "6faeae10-c0bd-4297-b992-c05511fedb21" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1945.397737] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "6faeae10-c0bd-4297-b992-c05511fedb21" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1945.428243] env[62684]: DEBUG oslo_vmware.api [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052904, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.474032] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "b009f710-1a94-4113-8feb-7cc5dd6a6519" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.197s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1945.587491] env[62684]: DEBUG oslo_vmware.api [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052907, 'name': Rename_Task, 'duration_secs': 0.172736} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1945.590677] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1945.591924] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dffa32c8-5e24-4ef4-8719-b1850081578a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.600134] env[62684]: DEBUG oslo_vmware.api [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1945.600134] env[62684]: value = "task-2052908" [ 1945.600134] env[62684]: _type = "Task" [ 1945.600134] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1945.607745] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe440aaf-ab46-4c72-9522-7f131e95e4a7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.617302] env[62684]: DEBUG oslo_vmware.api [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052908, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.628120] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-946bb9c2-137b-4bb4-a825-e0764bcf0ff8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.665220] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00a45b87-d468-40ff-96bd-ab72a4d4dc03 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.674732] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d57475ae-8872-4011-ab2b-cc21b1a752d4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.689392] env[62684]: DEBUG nova.compute.provider_tree [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1945.698819] env[62684]: DEBUG nova.network.neutron [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 
tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Successfully created port: 30f49652-7f4b-41c8-8bbd-33a9e6b81e7d {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1945.927173] env[62684]: DEBUG oslo_vmware.api [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2052904, 'name': RemoveSnapshot_Task, 'duration_secs': 1.568426} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1945.929924] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Deleted Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1945.930035] env[62684]: INFO nova.compute.manager [None req-8368bdaa-b705-452c-99bf-92f71e851d28 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Took 16.86 seconds to snapshot the instance on the hypervisor. [ 1945.976338] env[62684]: DEBUG nova.compute.manager [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1946.057901] env[62684]: DEBUG nova.network.neutron [req-6d44efd8-6f98-4c74-b85d-60f924ee9e7e req-8dbaa82f-88e4-4907-a70c-90e741f5a778 service nova] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Updated VIF entry in instance network info cache for port cd978df3-9bd9-4010-88e9-d5a4127cf2a9. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1946.057901] env[62684]: DEBUG nova.network.neutron [req-6d44efd8-6f98-4c74-b85d-60f924ee9e7e req-8dbaa82f-88e4-4907-a70c-90e741f5a778 service nova] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Updating instance_info_cache with network_info: [{"id": "cd978df3-9bd9-4010-88e9-d5a4127cf2a9", "address": "fa:16:3e:b8:e7:a7", "network": {"id": "7982dcb9-e661-4690-9931-bf412f4a564e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1278071472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72ac36eda47d4c51a4b421c764d0404d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9bb629cd-6d0f-4bed-965c-bd04a2f3ec49", "external-id": "nsx-vlan-transportzone-848", "segmentation_id": 848, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd978df3-9b", "ovs_interfaceid": "cd978df3-9bd9-4010-88e9-d5a4127cf2a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1946.072688] env[62684]: DEBUG nova.compute.manager [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1946.115032] env[62684]: DEBUG oslo_vmware.api [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052908, 'name': PowerOnVM_Task, 'duration_secs': 0.493036} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.115555] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1946.115555] env[62684]: DEBUG nova.compute.manager [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1946.116679] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c71c575-d913-44c8-8b9b-a48bdff2e2da {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.132658] env[62684]: DEBUG nova.virt.hardware [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1946.133756] env[62684]: DEBUG nova.virt.hardware [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1946.133961] env[62684]: DEBUG nova.virt.hardware [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1946.134245] env[62684]: DEBUG nova.virt.hardware [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1946.134362] env[62684]: DEBUG nova.virt.hardware [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1946.134515] env[62684]: DEBUG nova.virt.hardware [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 
tempest-MultipleCreateTestJSON-2097349723-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1946.134729] env[62684]: DEBUG nova.virt.hardware [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1946.134891] env[62684]: DEBUG nova.virt.hardware [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1946.135076] env[62684]: DEBUG nova.virt.hardware [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1946.135246] env[62684]: DEBUG nova.virt.hardware [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1946.135422] env[62684]: DEBUG nova.virt.hardware [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1946.136569] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c627faf8-b023-47a0-8a53-d7bb25b80436 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.145689] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c11cf1-df2a-4261-989d-dbac26134966 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.217038] env[62684]: ERROR nova.scheduler.client.report [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [req-7d147b4c-cbcb-4c92-8122-bc3ab60c3098] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7d147b4c-cbcb-4c92-8122-bc3ab60c3098"}]} [ 1946.232822] env[62684]: DEBUG nova.scheduler.client.report [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1946.260474] env[62684]: DEBUG nova.scheduler.client.report [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1946.260739] env[62684]: DEBUG nova.compute.provider_tree [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1946.271951] env[62684]: DEBUG nova.scheduler.client.report [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1946.292307] env[62684]: DEBUG nova.scheduler.client.report [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1946.509477] env[62684]: DEBUG oslo_concurrency.lockutils [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1946.557661] env[62684]: DEBUG oslo_concurrency.lockutils [req-6d44efd8-6f98-4c74-b85d-60f924ee9e7e req-8dbaa82f-88e4-4907-a70c-90e741f5a778 service nova] Releasing lock "refresh_cache-feca8680-4baa-4b2c-9875-69a88b351dc0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1946.650880] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1946.773117] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5cb9eb4-4d48-4bc3-9bec-f5d0630cb9db {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.780949] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c3d836-44c3-43b6-94d2-25ddef360c07 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.816652] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca16e7d1-d0a8-4d3b-8951-b350f76bbef3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.824674] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2c24226-d5c6-4f40-9083-8127abc42c08 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.838198] env[62684]: DEBUG nova.compute.provider_tree [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1947.374512] env[62684]: DEBUG nova.scheduler.client.report [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 88 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1947.374806] env[62684]: DEBUG nova.compute.provider_tree [None 
req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 88 to 89 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1947.375014] env[62684]: DEBUG nova.compute.provider_tree [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1947.685030] env[62684]: INFO nova.compute.manager [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Rebuilding instance [ 1947.742083] env[62684]: DEBUG nova.compute.manager [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1947.743041] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-383edb39-796e-43f3-8a4e-11791fcd8c8d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.771857] env[62684]: DEBUG nova.compute.manager [req-afe28292-cbc2-402a-937c-32e76d802af0 req-9fb560a5-cb01-4462-a24a-f34fb6a767b6 service nova] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Received event network-vif-plugged-30f49652-7f4b-41c8-8bbd-33a9e6b81e7d {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1947.772159] env[62684]: DEBUG oslo_concurrency.lockutils [req-afe28292-cbc2-402a-937c-32e76d802af0 req-9fb560a5-cb01-4462-a24a-f34fb6a767b6 service nova] Acquiring lock "df93c57e-716c-4c73-b551-9079a523ea0b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1947.774017] env[62684]: DEBUG oslo_concurrency.lockutils [req-afe28292-cbc2-402a-937c-32e76d802af0 req-9fb560a5-cb01-4462-a24a-f34fb6a767b6 service nova] Lock "df93c57e-716c-4c73-b551-9079a523ea0b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1947.774017] env[62684]: DEBUG oslo_concurrency.lockutils [req-afe28292-cbc2-402a-937c-32e76d802af0 req-9fb560a5-cb01-4462-a24a-f34fb6a767b6 service nova] Lock "df93c57e-716c-4c73-b551-9079a523ea0b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1947.774017] env[62684]: DEBUG nova.compute.manager [req-afe28292-cbc2-402a-937c-32e76d802af0 req-9fb560a5-cb01-4462-a24a-f34fb6a767b6 service nova] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] No waiting events found dispatching network-vif-plugged-30f49652-7f4b-41c8-8bbd-33a9e6b81e7d {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1947.774017] env[62684]: WARNING nova.compute.manager [req-afe28292-cbc2-402a-937c-32e76d802af0 req-9fb560a5-cb01-4462-a24a-f34fb6a767b6 service nova] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Received unexpected event network-vif-plugged-30f49652-7f4b-41c8-8bbd-33a9e6b81e7d for instance with vm_state building and task_state spawning. [ 1947.882687] env[62684]: DEBUG oslo_concurrency.lockutils [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.822s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1947.883476] env[62684]: DEBUG nova.compute.manager [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1947.888245] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.894s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1947.889831] env[62684]: INFO nova.compute.claims [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1947.916507] env[62684]: DEBUG nova.network.neutron [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Successfully updated port: 30f49652-7f4b-41c8-8bbd-33a9e6b81e7d {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1948.255377] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1948.256080] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c0ac2b44-4157-4418-9f1a-4b5a1a9bc8cb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.263600] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 
tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1948.263600] env[62684]: value = "task-2052909" [ 1948.263600] env[62684]: _type = "Task" [ 1948.263600] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.271732] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052909, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.395718] env[62684]: DEBUG nova.compute.utils [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1948.400565] env[62684]: DEBUG nova.compute.manager [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1948.400758] env[62684]: DEBUG nova.network.neutron [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1948.419841] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "refresh_cache-df93c57e-716c-4c73-b551-9079a523ea0b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1948.419981] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquired lock "refresh_cache-df93c57e-716c-4c73-b551-9079a523ea0b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1948.420151] env[62684]: DEBUG nova.network.neutron [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1948.477040] env[62684]: DEBUG nova.policy [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6ee3dcd733ed47348abbcdc3728fda04', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e026ed8417b84dcaa31ef7b09997faf1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': 
None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1948.776589] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052909, 'name': PowerOffVM_Task, 'duration_secs': 0.186239} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.776881] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1948.777106] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1948.777899] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef67a1f8-25ab-48a0-82a5-2c85c7c29164 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.787265] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1948.787549] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f46f8718-c35c-4f76-9c2a-43754d7ee713 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.901046] env[62684]: DEBUG nova.compute.manager [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1948.949629] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1948.949920] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1948.950127] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Deleting the datastore file [datastore1] dcb0a5b2-379e-44ff-a9b0-be615943c94e {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1948.951124] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d95769a8-8c96-410a-9094-c6e96a991ee5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.965591] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1948.965591] env[62684]: value = "task-2052911" [ 1948.965591] env[62684]: _type = "Task" [ 1948.965591] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.970268] env[62684]: DEBUG nova.network.neutron [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1948.978674] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052911, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.978674] env[62684]: DEBUG nova.network.neutron [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Successfully created port: 31109cc7-c34c-42b7-8245-d5d2e71162ad {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1949.132349] env[62684]: DEBUG nova.network.neutron [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Updating instance_info_cache with network_info: [{"id": "30f49652-7f4b-41c8-8bbd-33a9e6b81e7d", "address": "fa:16:3e:83:22:31", "network": {"id": "95ad5a29-9716-4b0c-937b-33a498c74ef7", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-342262548-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61c83953d09c4d1c97eee5a8679c30d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30f49652-7f", "ovs_interfaceid": "30f49652-7f4b-41c8-8bbd-33a9e6b81e7d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1949.365177] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-052d50f5-a6a0-41bc-a10e-b5433e6a8cfa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.373328] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e5366f-6ea6-4993-97d1-48d5b1896dca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.403918] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b27037c0-c540-4bcf-b1bf-bde00a9a1ab8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.415120] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-710aa2e1-dbb0-434f-bd37-a2b4e61728a6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.428754] env[62684]: DEBUG nova.compute.provider_tree [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1949.475702] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052911, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.222957} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.475702] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1949.475702] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1949.475805] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1949.635666] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Releasing lock "refresh_cache-df93c57e-716c-4c73-b551-9079a523ea0b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1949.636028] env[62684]: DEBUG nova.compute.manager [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Instance network_info: |[{"id": "30f49652-7f4b-41c8-8bbd-33a9e6b81e7d", "address": "fa:16:3e:83:22:31", "network": {"id": "95ad5a29-9716-4b0c-937b-33a498c74ef7", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-342262548-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61c83953d09c4d1c97eee5a8679c30d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30f49652-7f", "ovs_interfaceid": "30f49652-7f4b-41c8-8bbd-33a9e6b81e7d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1949.636509] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f 
tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:22:31', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c68b7663-4f0e-47f0-ac7f-40c6d952f7bb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '30f49652-7f4b-41c8-8bbd-33a9e6b81e7d', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1949.644147] env[62684]: DEBUG oslo.service.loopingcall [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1949.644760] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1949.645474] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b6d0e8d1-d6cf-4a48-8c04-520e463120dd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.665572] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1949.665572] env[62684]: value = "task-2052912" [ 1949.665572] env[62684]: _type = "Task" [ 1949.665572] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.673239] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052912, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.838036] env[62684]: DEBUG nova.compute.manager [req-e08b7062-24c2-4db7-8cc7-fe9a7e107f8b req-ce13f8f7-999d-46ae-ab8c-56f6b3d45104 service nova] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Received event network-changed-30f49652-7f4b-41c8-8bbd-33a9e6b81e7d {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1949.838396] env[62684]: DEBUG nova.compute.manager [req-e08b7062-24c2-4db7-8cc7-fe9a7e107f8b req-ce13f8f7-999d-46ae-ab8c-56f6b3d45104 service nova] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Refreshing instance network info cache due to event network-changed-30f49652-7f4b-41c8-8bbd-33a9e6b81e7d. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1949.838517] env[62684]: DEBUG oslo_concurrency.lockutils [req-e08b7062-24c2-4db7-8cc7-fe9a7e107f8b req-ce13f8f7-999d-46ae-ab8c-56f6b3d45104 service nova] Acquiring lock "refresh_cache-df93c57e-716c-4c73-b551-9079a523ea0b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1949.838750] env[62684]: DEBUG oslo_concurrency.lockutils [req-e08b7062-24c2-4db7-8cc7-fe9a7e107f8b req-ce13f8f7-999d-46ae-ab8c-56f6b3d45104 service nova] Acquired lock "refresh_cache-df93c57e-716c-4c73-b551-9079a523ea0b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1949.838871] env[62684]: DEBUG nova.network.neutron [req-e08b7062-24c2-4db7-8cc7-fe9a7e107f8b req-ce13f8f7-999d-46ae-ab8c-56f6b3d45104 service nova] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Refreshing network info cache for port 30f49652-7f4b-41c8-8bbd-33a9e6b81e7d {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1949.920969] env[62684]: DEBUG nova.compute.manager [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1949.932263] env[62684]: DEBUG nova.scheduler.client.report [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1949.955412] env[62684]: DEBUG nova.virt.hardware [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1949.955680] env[62684]: DEBUG nova.virt.hardware [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] 
Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1949.955845] env[62684]: DEBUG nova.virt.hardware [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1949.956046] env[62684]: DEBUG nova.virt.hardware [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1949.956228] env[62684]: DEBUG nova.virt.hardware [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1949.956388] env[62684]: DEBUG nova.virt.hardware [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1949.956605] env[62684]: DEBUG nova.virt.hardware [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1949.956768] env[62684]: DEBUG nova.virt.hardware [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1949.956938] env[62684]: DEBUG nova.virt.hardware [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1949.957120] env[62684]: DEBUG nova.virt.hardware [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1949.957305] env[62684]: DEBUG nova.virt.hardware [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1949.958467] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a65638b-06cd-499c-a015-def20187fbed {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.966627] env[62684]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2819665-e8a6-4084-a874-650f455c95cb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.175963] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052912, 'name': CreateVM_Task, 'duration_secs': 0.316723} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1950.176175] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1950.176876] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1950.177059] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1950.177408] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1950.177668] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ca9c5ee-249a-471c-a978-b04c48e15cd2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.182451] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1950.182451] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5265c3a7-e71b-2207-98ec-8d78e68d097f" [ 1950.182451] env[62684]: _type = "Task" [ 1950.182451] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.190191] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5265c3a7-e71b-2207-98ec-8d78e68d097f, 'name': SearchDatastore_Task} progress is 0%. 
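
The entries around here show the driver creating a VM (task-2052912, CreateVM_Task) and then polling a SearchDatastore_Task until it reports "completed successfully". Below is a minimal sketch of how a client drives that submit-then-poll cycle with oslo.vmware's public session API; the vCenter host, credentials, and the managed-object references passed into the function are placeholders/assumptions, not values taken from this environment.

```python
# Hedged sketch of the task polling visible in this log (invoke a *_Task
# SOAP method, then let the session poll task.info until done).
from oslo_vmware import api as vmware_api


def create_vm_and_wait(session, vm_folder_ref, config_spec, res_pool_ref):
    """Issue Folder.CreateVM_Task and block until the poller sees completion."""
    task = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder_ref,
                              config=config_spec, pool=res_pool_ref)
    # wait_for_task() re-reads the Task object on task_poll_interval and
    # raises an oslo.vmware exception if the task ends in an error state.
    return session.wait_for_task(task)


if __name__ == '__main__':
    # Placeholder endpoint and credentials -- assumptions for illustration.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'administrator@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)
    # vm_folder_ref / config_spec / res_pool_ref would come from the
    # environment (folder, VirtualMachineConfigSpec, resource pool).
```
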
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.438390] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.550s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1950.438911] env[62684]: DEBUG nova.compute.manager [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1950.442574] env[62684]: DEBUG oslo_concurrency.lockutils [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 43.839s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1950.442832] env[62684]: DEBUG oslo_concurrency.lockutils [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1950.445744] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.647s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1950.447699] env[62684]: INFO nova.compute.claims [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1950.477491] env[62684]: INFO nova.scheduler.client.report [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Deleted allocations for instance ab2c7cbe-6f46-4174-bffb-055a15f2d56b [ 1950.515911] env[62684]: DEBUG nova.virt.hardware [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1950.515911] env[62684]: DEBUG nova.virt.hardware [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1950.515911] env[62684]: DEBUG nova.virt.hardware [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1950.515911] env[62684]: DEBUG nova.virt.hardware [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1950.515911] env[62684]: DEBUG nova.virt.hardware [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1950.515911] env[62684]: DEBUG nova.virt.hardware [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1950.516169] env[62684]: DEBUG nova.virt.hardware [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1950.516169] env[62684]: DEBUG nova.virt.hardware [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1950.517224] env[62684]: DEBUG nova.virt.hardware [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1950.517224] env[62684]: DEBUG nova.virt.hardware [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1950.517224] env[62684]: DEBUG nova.virt.hardware [None 
req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1950.517922] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff3e565-1178-4f0c-bfa9-553faef3e79b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.525473] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06b9a7f0-fc3f-4e08-a16d-d83f5b7d17f9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.542707] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:51:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bfa7abe-7e46-4d8f-b50a-4d0c4509e4dc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '617e9c54-b56e-4945-b890-de6be33b657b', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1950.550106] env[62684]: DEBUG oslo.service.loopingcall [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1950.552882] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1950.553153] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-668b4d16-905d-4fe8-b01f-d03afed03a35 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.575567] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1950.575567] env[62684]: value = "task-2052913" [ 1950.575567] env[62684]: _type = "Task" [ 1950.575567] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.583987] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052913, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.606028] env[62684]: DEBUG nova.network.neutron [req-e08b7062-24c2-4db7-8cc7-fe9a7e107f8b req-ce13f8f7-999d-46ae-ab8c-56f6b3d45104 service nova] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Updated VIF entry in instance network info cache for port 30f49652-7f4b-41c8-8bbd-33a9e6b81e7d. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1950.606465] env[62684]: DEBUG nova.network.neutron [req-e08b7062-24c2-4db7-8cc7-fe9a7e107f8b req-ce13f8f7-999d-46ae-ab8c-56f6b3d45104 service nova] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Updating instance_info_cache with network_info: [{"id": "30f49652-7f4b-41c8-8bbd-33a9e6b81e7d", "address": "fa:16:3e:83:22:31", "network": {"id": "95ad5a29-9716-4b0c-937b-33a498c74ef7", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-342262548-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61c83953d09c4d1c97eee5a8679c30d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30f49652-7f", "ovs_interfaceid": "30f49652-7f4b-41c8-8bbd-33a9e6b81e7d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1950.695945] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5265c3a7-e71b-2207-98ec-8d78e68d097f, 'name': SearchDatastore_Task, 'duration_secs': 0.0102} completed successfully. 
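
The instance_info_cache update just above stores the Neutron VIF data as a list of dicts. A small, self-contained sketch of pulling the commonly needed fields out of that shape, with values abbreviated from the logged entry for port 30f49652-7f4b-41c8-8bbd-33a9e6b81e7d:

```python
# Extracting port id, MAC, tap device name and fixed IPs from a
# network_info entry shaped like the one cached above.
network_info = [{
    "id": "30f49652-7f4b-41c8-8bbd-33a9e6b81e7d",
    "address": "fa:16:3e:83:22:31",
    "devname": "tap30f49652-7f",
    "network": {
        "id": "95ad5a29-9716-4b0c-937b-33a498c74ef7",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1", "type": "gateway"},
            "ips": [{"address": "192.168.128.13", "type": "fixed"}],
        }],
    },
    "details": {"segmentation_id": 696},
}]

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]]
    print(vif["id"], vif["address"], vif["devname"], fixed_ips)
# -> 30f49652-... fa:16:3e:83:22:31 tap30f49652-7f ['192.168.128.13']
```
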
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1950.695945] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1950.695945] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1950.695945] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1950.695945] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1950.695945] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1950.695945] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-030d5104-6568-4f1a-b589-e8c87bdd3de0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.703515] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1950.703700] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1950.704449] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-463f7a7b-611c-4f40-be53-abc90a696721 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.709822] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1950.709822] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f44ee0-06d5-32bf-6bde-a89145df59ee" [ 1950.709822] env[62684]: _type = "Task" [ 1950.709822] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.717571] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f44ee0-06d5-32bf-6bde-a89145df59ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.849093] env[62684]: DEBUG nova.network.neutron [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Successfully updated port: 31109cc7-c34c-42b7-8245-d5d2e71162ad {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1950.954705] env[62684]: DEBUG nova.compute.utils [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1950.957762] env[62684]: DEBUG nova.compute.manager [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Allocating IP information in the background. 
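
The "Acquiring lock" / "Acquired lock" / "Releasing lock" lines on the image-cache key above come from oslo.concurrency. A minimal sketch of that pattern follows; the lock name mirrors the key format seen in the log, while external=True and the lock_path are assumptions added to illustrate the cross-process ("external semaphore") variant, not necessarily what Nova passes here.

```python
# Serializing work on a shared image-cache entry with oslo.concurrency.
from oslo_concurrency import lockutils

IMAGE_CACHE_LOCK = ('[datastore1] devstack-image-cache_base/'
                    '3931321c-cb4c-4b87-8d3a-50e05ea01db2')


def fetch_image_if_missing():
    # lockutils.lock() is a context manager; its acquire/release debug
    # logging is what produces the lockutils.py:310/313/331 lines above.
    with lockutils.lock(IMAGE_CACHE_LOCK, external=True, lock_path='/tmp'):
        pass  # check the datastore cache, copy the base VMDK if absent, etc.
```
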
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1950.957956] env[62684]: DEBUG nova.network.neutron [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1950.986557] env[62684]: DEBUG oslo_concurrency.lockutils [None req-623e5e0c-4824-4f44-9b42-3f3a200dc2df tempest-ListImageFiltersTestJSON-1064551173 tempest-ListImageFiltersTestJSON-1064551173-project-member] Lock "ab2c7cbe-6f46-4174-bffb-055a15f2d56b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.122s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1951.057652] env[62684]: DEBUG nova.policy [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '88973e8d46da4fb892607ba42eefe323', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9473d6611ded48cd9e9d10a6f0bf1a3c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1951.085279] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052913, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.109311] env[62684]: DEBUG oslo_concurrency.lockutils [req-e08b7062-24c2-4db7-8cc7-fe9a7e107f8b req-ce13f8f7-999d-46ae-ab8c-56f6b3d45104 service nova] Releasing lock "refresh_cache-df93c57e-716c-4c73-b551-9079a523ea0b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1951.219793] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f44ee0-06d5-32bf-6bde-a89145df59ee, 'name': SearchDatastore_Task, 'duration_secs': 0.0089} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.220671] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5a7a602-3787-4f3e-9a2e-aa5baf108c7a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.226819] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1951.226819] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]525d3f02-ca61-5a69-ff58-7eb991f52698" [ 1951.226819] env[62684]: _type = "Task" [ 1951.226819] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1951.235293] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]525d3f02-ca61-5a69-ff58-7eb991f52698, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.356773] env[62684]: DEBUG oslo_concurrency.lockutils [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Acquiring lock "refresh_cache-a3c7943e-7528-41bc-9a20-1e2b57f832e3" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1951.356925] env[62684]: DEBUG oslo_concurrency.lockutils [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Acquired lock "refresh_cache-a3c7943e-7528-41bc-9a20-1e2b57f832e3" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1951.357077] env[62684]: DEBUG nova.network.neutron [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1951.458761] env[62684]: DEBUG nova.compute.manager [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1951.544288] env[62684]: DEBUG nova.network.neutron [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Successfully created port: d15c8cc5-3728-42eb-8a3a-e5b3390bf5dd {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1951.591115] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052913, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.737499] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]525d3f02-ca61-5a69-ff58-7eb991f52698, 'name': SearchDatastore_Task, 'duration_secs': 0.012034} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.739924] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1951.740270] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] df93c57e-716c-4c73-b551-9079a523ea0b/df93c57e-716c-4c73-b551-9079a523ea0b.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1951.740667] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bb0cca5e-c3c7-4730-b3c3-70a453e83614 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.747448] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1951.747448] env[62684]: value = "task-2052914" [ 1951.747448] env[62684]: _type = "Task" [ 1951.747448] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1951.755588] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052914, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.875095] env[62684]: DEBUG nova.compute.manager [req-eb315981-2041-47e9-9b89-a6a575643e29 req-6eb9f48f-745b-434d-86d4-4002b2487d13 service nova] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Received event network-vif-plugged-31109cc7-c34c-42b7-8245-d5d2e71162ad {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1951.875484] env[62684]: DEBUG oslo_concurrency.lockutils [req-eb315981-2041-47e9-9b89-a6a575643e29 req-6eb9f48f-745b-434d-86d4-4002b2487d13 service nova] Acquiring lock "a3c7943e-7528-41bc-9a20-1e2b57f832e3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1951.875807] env[62684]: DEBUG oslo_concurrency.lockutils [req-eb315981-2041-47e9-9b89-a6a575643e29 req-6eb9f48f-745b-434d-86d4-4002b2487d13 service nova] Lock "a3c7943e-7528-41bc-9a20-1e2b57f832e3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1951.875999] env[62684]: DEBUG oslo_concurrency.lockutils [req-eb315981-2041-47e9-9b89-a6a575643e29 req-6eb9f48f-745b-434d-86d4-4002b2487d13 service nova] Lock "a3c7943e-7528-41bc-9a20-1e2b57f832e3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1951.876243] env[62684]: DEBUG nova.compute.manager [req-eb315981-2041-47e9-9b89-a6a575643e29 req-6eb9f48f-745b-434d-86d4-4002b2487d13 service nova] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] No waiting events found dispatching network-vif-plugged-31109cc7-c34c-42b7-8245-d5d2e71162ad {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1951.876428] env[62684]: WARNING nova.compute.manager [req-eb315981-2041-47e9-9b89-a6a575643e29 req-6eb9f48f-745b-434d-86d4-4002b2487d13 service nova] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Received unexpected event network-vif-plugged-31109cc7-c34c-42b7-8245-d5d2e71162ad for instance with vm_state building and task_state spawning. [ 1951.876619] env[62684]: DEBUG nova.compute.manager [req-eb315981-2041-47e9-9b89-a6a575643e29 req-6eb9f48f-745b-434d-86d4-4002b2487d13 service nova] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Received event network-changed-31109cc7-c34c-42b7-8245-d5d2e71162ad {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1951.876785] env[62684]: DEBUG nova.compute.manager [req-eb315981-2041-47e9-9b89-a6a575643e29 req-6eb9f48f-745b-434d-86d4-4002b2487d13 service nova] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Refreshing instance network info cache due to event network-changed-31109cc7-c34c-42b7-8245-d5d2e71162ad. 
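
The event entries above (network-vif-plugged, the "<uuid>-events" lock, "No waiting events found dispatching ...", and the "Received unexpected event" warning) are Nova's external-event plumbing: a waiter registers the events it expects and the Neutron notification pops them. The snippet below is a deliberately simplified, hypothetical model of that bookkeeping, not Nova's actual implementation; only the "<instance-uuid>-events" key format is taken from the log.

```python
# Toy model of per-instance event registration and pop.
import threading


class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}   # "<uuid>-events" -> {event_name: threading.Event}

    def prepare(self, instance_uuid, event_name):
        with self._lock:
            ev = threading.Event()
            self._waiters.setdefault(f'{instance_uuid}-events', {})[event_name] = ev
            return ev

    def pop(self, instance_uuid, event_name):
        """Return the waiter for event_name, or None if nothing was waiting
        (the case where the log prints the 'unexpected event' warning)."""
        with self._lock:
            return self._waiters.get(f'{instance_uuid}-events', {}).pop(event_name, None)


events = InstanceEvents()
waiter = events.prepare('a3c7943e-7528-41bc-9a20-1e2b57f832e3',
                        'network-vif-plugged-31109cc7-c34c-42b7-8245-d5d2e71162ad')
popped = events.pop('a3c7943e-7528-41bc-9a20-1e2b57f832e3',
                    'network-vif-plugged-31109cc7-c34c-42b7-8245-d5d2e71162ad')
if popped is not None:
    popped.set()   # unblocks whoever is blocked on waiter.wait()
```
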
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1951.876973] env[62684]: DEBUG oslo_concurrency.lockutils [req-eb315981-2041-47e9-9b89-a6a575643e29 req-6eb9f48f-745b-434d-86d4-4002b2487d13 service nova] Acquiring lock "refresh_cache-a3c7943e-7528-41bc-9a20-1e2b57f832e3" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1951.907414] env[62684]: DEBUG nova.network.neutron [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1951.945988] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b50057d-eb61-44ef-89d3-c0c3f9ae5930 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.954469] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf97693d-2ad1-450b-bd3a-cc3aa7dfe3e7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.005603] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d90f77c-c1c9-496a-b0af-376353b2ec20 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.015732] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a892582-81e0-4503-9630-32fb35923375 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.031616] env[62684]: DEBUG nova.compute.provider_tree [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1952.089746] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052913, 'name': CreateVM_Task, 'duration_secs': 1.331887} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.089992] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1952.090736] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1952.090923] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1952.091292] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1952.091619] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87b9dc87-4323-4210-87f4-eb427cb36b46 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.096808] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1952.096808] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52941fb9-55b4-043f-77be-1a9379443344" [ 1952.096808] env[62684]: _type = "Task" [ 1952.096808] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.107473] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52941fb9-55b4-043f-77be-1a9379443344, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.111981] env[62684]: DEBUG nova.network.neutron [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Updating instance_info_cache with network_info: [{"id": "31109cc7-c34c-42b7-8245-d5d2e71162ad", "address": "fa:16:3e:c1:28:ca", "network": {"id": "9fdd139d-ff81-495c-bade-b460b26ae2e8", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1131405815-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e026ed8417b84dcaa31ef7b09997faf1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31109cc7-c3", "ovs_interfaceid": "31109cc7-c34c-42b7-8245-d5d2e71162ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1952.257499] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052914, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.449742} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.257783] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] df93c57e-716c-4c73-b551-9079a523ea0b/df93c57e-716c-4c73-b551-9079a523ea0b.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1952.258008] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1952.258277] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-957982bb-77d2-4d28-872d-f553bf05f88c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.264994] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1952.264994] env[62684]: value = "task-2052915" [ 1952.264994] env[62684]: _type = "Task" [ 1952.264994] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.272947] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052915, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.512211] env[62684]: DEBUG nova.compute.manager [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1952.536470] env[62684]: DEBUG nova.scheduler.client.report [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1952.550037] env[62684]: DEBUG nova.virt.hardware [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1952.550037] env[62684]: DEBUG nova.virt.hardware [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1952.550037] env[62684]: DEBUG nova.virt.hardware [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1952.550037] env[62684]: DEBUG nova.virt.hardware [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1952.550037] env[62684]: DEBUG nova.virt.hardware [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1952.550989] env[62684]: DEBUG nova.virt.hardware [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1952.551488] 
env[62684]: DEBUG nova.virt.hardware [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1952.551837] env[62684]: DEBUG nova.virt.hardware [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1952.552150] env[62684]: DEBUG nova.virt.hardware [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1952.552426] env[62684]: DEBUG nova.virt.hardware [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1952.552705] env[62684]: DEBUG nova.virt.hardware [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1952.554369] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d406ec36-1f66-4c60-be12-429f1323413f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.568138] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31c95981-6ad5-4e23-98d9-69f4dd361c36 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.612359] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52941fb9-55b4-043f-77be-1a9379443344, 'name': SearchDatastore_Task, 'duration_secs': 0.045937} completed successfully. 
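
The repeated "Flavor limits 0:0:0 ... Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" blocks above are Nova choosing a guest CPU topology for the m1.nano flavor (vcpus=1) under the default 65536 per-dimension limits. A simplified sketch of the idea, not nova.virt.hardware itself: enumerate the (sockets, cores, threads) factorizations of the vCPU count that respect the limits; Nova then orders the candidates by flavor/image preferences before picking one.

```python
# Enumerating candidate CPU topologies for a given vCPU count.
from itertools import product


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
        if (sockets * cores * threads == vcpus
                and sockets <= max_sockets
                and cores <= max_cores
                and threads <= max_threads):
            yield (sockets, cores, threads)


# m1.nano (vcpus=1) with the default limits, as in the log:
print(sorted(possible_topologies(1)))      # -> [(1, 1, 1)]
print(sorted(possible_topologies(4))[:3])  # e.g. [(1, 1, 4), (1, 2, 2), (1, 4, 1)]
```
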
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.612959] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1952.613349] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1952.613730] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1952.613959] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1952.614317] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1952.615100] env[62684]: DEBUG oslo_concurrency.lockutils [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Releasing lock "refresh_cache-a3c7943e-7528-41bc-9a20-1e2b57f832e3" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1952.616184] env[62684]: DEBUG nova.compute.manager [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Instance network_info: |[{"id": "31109cc7-c34c-42b7-8245-d5d2e71162ad", "address": "fa:16:3e:c1:28:ca", "network": {"id": "9fdd139d-ff81-495c-bade-b460b26ae2e8", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1131405815-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e026ed8417b84dcaa31ef7b09997faf1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": 
true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31109cc7-c3", "ovs_interfaceid": "31109cc7-c34c-42b7-8245-d5d2e71162ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1952.616184] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b2b3bb06-19ec-4ae5-ae0b-42c0461a7d05 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.618127] env[62684]: DEBUG oslo_concurrency.lockutils [req-eb315981-2041-47e9-9b89-a6a575643e29 req-6eb9f48f-745b-434d-86d4-4002b2487d13 service nova] Acquired lock "refresh_cache-a3c7943e-7528-41bc-9a20-1e2b57f832e3" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1952.618321] env[62684]: DEBUG nova.network.neutron [req-eb315981-2041-47e9-9b89-a6a575643e29 req-6eb9f48f-745b-434d-86d4-4002b2487d13 service nova] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Refreshing network info cache for port 31109cc7-c34c-42b7-8245-d5d2e71162ad {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1952.620031] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:28:ca', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b399c74-1411-408a-b4cd-84e268ae83fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '31109cc7-c34c-42b7-8245-d5d2e71162ad', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1952.627012] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Creating folder: Project (e026ed8417b84dcaa31ef7b09997faf1). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1952.627523] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8757a5db-18b7-4406-98f9-71f08659abae {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.636262] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1952.636367] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1952.638111] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12ce05b5-3252-4dba-9979-2436ab1508e8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.640430] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Created folder: Project (e026ed8417b84dcaa31ef7b09997faf1) in parent group-v421118. [ 1952.640632] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Creating folder: Instances. Parent ref: group-v421276. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1952.642473] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-02762ae9-20b9-4c10-bbc6-da539355e2ba {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.645609] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1952.645609] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ac8f76-bb26-e2a9-a246-cedcee51c33a" [ 1952.645609] env[62684]: _type = "Task" [ 1952.645609] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.651024] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Created folder: Instances in parent group-v421276. [ 1952.651191] env[62684]: DEBUG oslo.service.loopingcall [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1952.652042] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1952.652042] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-53a36876-7d14-4857-aca0-5c51d426beb2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.669323] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ac8f76-bb26-e2a9-a246-cedcee51c33a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.674704] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1952.674704] env[62684]: value = "task-2052918" [ 1952.674704] env[62684]: _type = "Task" [ 1952.674704] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.682342] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052918, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.776175] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052915, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067341} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.777065] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1952.777288] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7ebab3f-e92e-4667-844f-d7e26655faa7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.798877] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] df93c57e-716c-4c73-b551-9079a523ea0b/df93c57e-716c-4c73-b551-9079a523ea0b.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1952.799173] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f9aba38-348f-41f1-b7a6-0bd381a74c36 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.820141] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1952.820141] env[62684]: value = "task-2052919" [ 1952.820141] env[62684]: _type = "Task" [ 1952.820141] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.827909] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052919, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.042026] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.596s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1953.042719] env[62684]: DEBUG nova.compute.manager [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1953.045775] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.762s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1953.048211] env[62684]: INFO nova.compute.claims [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1953.125142] env[62684]: DEBUG nova.network.neutron [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Successfully updated port: d15c8cc5-3728-42eb-8a3a-e5b3390bf5dd {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1953.161180] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ac8f76-bb26-e2a9-a246-cedcee51c33a, 'name': SearchDatastore_Task, 'duration_secs': 0.008666} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.161180] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39623231-1b6a-44dd-bad5-7e56f86f978f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.167072] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1953.167072] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52264202-29ce-4945-bd9b-381840183e4a" [ 1953.167072] env[62684]: _type = "Task" [ 1953.167072] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.177251] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52264202-29ce-4945-bd9b-381840183e4a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.185637] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052918, 'name': CreateVM_Task, 'duration_secs': 0.502649} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.187775] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1953.188143] env[62684]: DEBUG oslo_concurrency.lockutils [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1953.188353] env[62684]: DEBUG oslo_concurrency.lockutils [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1953.188707] env[62684]: DEBUG oslo_concurrency.lockutils [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1953.189272] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72233080-5162-461e-aad5-b4a53cd988a9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.193872] env[62684]: DEBUG oslo_vmware.api [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Waiting for the task: (returnval){ [ 1953.193872] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5202c1fe-2cf2-b8fa-b830-66666907f278" [ 1953.193872] env[62684]: _type = "Task" [ 1953.193872] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.202630] env[62684]: DEBUG oslo_vmware.api [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5202c1fe-2cf2-b8fa-b830-66666907f278, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.331727] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052919, 'name': ReconfigVM_Task, 'duration_secs': 0.301883} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.331902] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Reconfigured VM instance instance-00000036 to attach disk [datastore1] df93c57e-716c-4c73-b551-9079a523ea0b/df93c57e-716c-4c73-b551-9079a523ea0b.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1953.332747] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-be25c058-adc1-4f07-a850-68c2f0e3b7b4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.340194] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1953.340194] env[62684]: value = "task-2052920" [ 1953.340194] env[62684]: _type = "Task" [ 1953.340194] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.349734] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052920, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.467390] env[62684]: DEBUG nova.network.neutron [req-eb315981-2041-47e9-9b89-a6a575643e29 req-6eb9f48f-745b-434d-86d4-4002b2487d13 service nova] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Updated VIF entry in instance network info cache for port 31109cc7-c34c-42b7-8245-d5d2e71162ad. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1953.467837] env[62684]: DEBUG nova.network.neutron [req-eb315981-2041-47e9-9b89-a6a575643e29 req-6eb9f48f-745b-434d-86d4-4002b2487d13 service nova] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Updating instance_info_cache with network_info: [{"id": "31109cc7-c34c-42b7-8245-d5d2e71162ad", "address": "fa:16:3e:c1:28:ca", "network": {"id": "9fdd139d-ff81-495c-bade-b460b26ae2e8", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1131405815-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e026ed8417b84dcaa31ef7b09997faf1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31109cc7-c3", "ovs_interfaceid": "31109cc7-c34c-42b7-8245-d5d2e71162ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1953.554354] env[62684]: DEBUG nova.compute.utils [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1953.557163] env[62684]: DEBUG nova.compute.manager [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Not allocating networking since 'none' was specified. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1953.628607] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Acquiring lock "refresh_cache-31419285-9fdf-4d37-94d7-d1b08c6b6b05" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1953.630020] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Acquired lock "refresh_cache-31419285-9fdf-4d37-94d7-d1b08c6b6b05" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1953.630200] env[62684]: DEBUG nova.network.neutron [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1953.677629] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52264202-29ce-4945-bd9b-381840183e4a, 'name': SearchDatastore_Task, 'duration_secs': 0.00934} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.678172] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1953.678438] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] dcb0a5b2-379e-44ff-a9b0-be615943c94e/dcb0a5b2-379e-44ff-a9b0-be615943c94e.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1953.678768] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dfca9a21-17fd-4cca-90ed-cbb3d55cc00f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.691360] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1953.691360] env[62684]: value = "task-2052921" [ 1953.691360] env[62684]: _type = "Task" [ 1953.691360] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.702188] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052921, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.707868] env[62684]: DEBUG oslo_vmware.api [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5202c1fe-2cf2-b8fa-b830-66666907f278, 'name': SearchDatastore_Task, 'duration_secs': 0.008845} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.708329] env[62684]: DEBUG oslo_concurrency.lockutils [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1953.708587] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1953.708822] env[62684]: DEBUG oslo_concurrency.lockutils [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1953.708969] env[62684]: DEBUG oslo_concurrency.lockutils [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1953.709169] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1953.709441] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-428ec7b8-e8d4-4c8a-81da-954e8dbff607 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.728800] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Created directory with path [datastore1] 
devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1953.729024] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1953.729772] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69025ca2-416c-4437-a021-fe8d49663438 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.735207] env[62684]: DEBUG oslo_vmware.api [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Waiting for the task: (returnval){ [ 1953.735207] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5277cd71-b189-0d37-340f-c1995766702b" [ 1953.735207] env[62684]: _type = "Task" [ 1953.735207] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.743866] env[62684]: DEBUG oslo_vmware.api [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5277cd71-b189-0d37-340f-c1995766702b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.850569] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052920, 'name': Rename_Task, 'duration_secs': 0.148707} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.851023] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1953.851227] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-07f8ab0c-9d49-47a9-9c46-9b147f7ff66f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.858375] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1953.858375] env[62684]: value = "task-2052922" [ 1953.858375] env[62684]: _type = "Task" [ 1953.858375] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.866996] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052922, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.925787] env[62684]: DEBUG nova.compute.manager [req-10607316-8453-4a6a-8bc6-5e1a4cf562ab req-2df224fc-70bd-4654-a8b6-bca2fd18129d service nova] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Received event network-vif-plugged-d15c8cc5-3728-42eb-8a3a-e5b3390bf5dd {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1953.927043] env[62684]: DEBUG oslo_concurrency.lockutils [req-10607316-8453-4a6a-8bc6-5e1a4cf562ab req-2df224fc-70bd-4654-a8b6-bca2fd18129d service nova] Acquiring lock "31419285-9fdf-4d37-94d7-d1b08c6b6b05-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1953.927470] env[62684]: DEBUG oslo_concurrency.lockutils [req-10607316-8453-4a6a-8bc6-5e1a4cf562ab req-2df224fc-70bd-4654-a8b6-bca2fd18129d service nova] Lock "31419285-9fdf-4d37-94d7-d1b08c6b6b05-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1953.927771] env[62684]: DEBUG oslo_concurrency.lockutils [req-10607316-8453-4a6a-8bc6-5e1a4cf562ab req-2df224fc-70bd-4654-a8b6-bca2fd18129d service nova] Lock "31419285-9fdf-4d37-94d7-d1b08c6b6b05-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1953.928150] env[62684]: DEBUG nova.compute.manager [req-10607316-8453-4a6a-8bc6-5e1a4cf562ab req-2df224fc-70bd-4654-a8b6-bca2fd18129d service nova] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] No waiting events found dispatching network-vif-plugged-d15c8cc5-3728-42eb-8a3a-e5b3390bf5dd {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1953.929376] env[62684]: WARNING nova.compute.manager [req-10607316-8453-4a6a-8bc6-5e1a4cf562ab req-2df224fc-70bd-4654-a8b6-bca2fd18129d service nova] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Received unexpected event network-vif-plugged-d15c8cc5-3728-42eb-8a3a-e5b3390bf5dd for instance with vm_state building and task_state spawning. [ 1953.929376] env[62684]: DEBUG nova.compute.manager [req-10607316-8453-4a6a-8bc6-5e1a4cf562ab req-2df224fc-70bd-4654-a8b6-bca2fd18129d service nova] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Received event network-changed-d15c8cc5-3728-42eb-8a3a-e5b3390bf5dd {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1953.929376] env[62684]: DEBUG nova.compute.manager [req-10607316-8453-4a6a-8bc6-5e1a4cf562ab req-2df224fc-70bd-4654-a8b6-bca2fd18129d service nova] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Refreshing instance network info cache due to event network-changed-d15c8cc5-3728-42eb-8a3a-e5b3390bf5dd. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1953.929376] env[62684]: DEBUG oslo_concurrency.lockutils [req-10607316-8453-4a6a-8bc6-5e1a4cf562ab req-2df224fc-70bd-4654-a8b6-bca2fd18129d service nova] Acquiring lock "refresh_cache-31419285-9fdf-4d37-94d7-d1b08c6b6b05" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1953.970749] env[62684]: DEBUG oslo_concurrency.lockutils [req-eb315981-2041-47e9-9b89-a6a575643e29 req-6eb9f48f-745b-434d-86d4-4002b2487d13 service nova] Releasing lock "refresh_cache-a3c7943e-7528-41bc-9a20-1e2b57f832e3" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1954.062648] env[62684]: DEBUG nova.compute.manager [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1954.196129] env[62684]: DEBUG nova.network.neutron [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1954.204460] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052921, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.504592} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.207224] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] dcb0a5b2-379e-44ff-a9b0-be615943c94e/dcb0a5b2-379e-44ff-a9b0-be615943c94e.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1954.207469] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1954.207925] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fd092e71-47fe-4685-bdb6-8c73e64a0f48 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.218769] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1954.218769] env[62684]: value = "task-2052923" [ 1954.218769] env[62684]: _type = "Task" [ 1954.218769] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.230592] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052923, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.247329] env[62684]: DEBUG oslo_vmware.api [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5277cd71-b189-0d37-340f-c1995766702b, 'name': SearchDatastore_Task, 'duration_secs': 0.008522} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.252093] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d56c160-a247-46ce-bbb5-b1df62365187 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.262176] env[62684]: DEBUG oslo_vmware.api [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Waiting for the task: (returnval){ [ 1954.262176] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bc4d1d-800d-0cc1-e505-7e0960e13a40" [ 1954.262176] env[62684]: _type = "Task" [ 1954.262176] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.272373] env[62684]: DEBUG oslo_vmware.api [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bc4d1d-800d-0cc1-e505-7e0960e13a40, 'name': SearchDatastore_Task, 'duration_secs': 0.009212} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.272497] env[62684]: DEBUG oslo_concurrency.lockutils [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1954.272702] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] a3c7943e-7528-41bc-9a20-1e2b57f832e3/a3c7943e-7528-41bc-9a20-1e2b57f832e3.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1954.273362] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b14cc109-dc3d-4e72-a933-ca08e706492c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.279462] env[62684]: DEBUG oslo_vmware.api [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Waiting for the task: (returnval){ [ 1954.279462] env[62684]: value = "task-2052924" [ 1954.279462] env[62684]: _type = "Task" [ 1954.279462] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.293495] env[62684]: DEBUG oslo_vmware.api [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Task: {'id': task-2052924, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.371788] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052922, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.503230] env[62684]: DEBUG nova.network.neutron [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Updating instance_info_cache with network_info: [{"id": "d15c8cc5-3728-42eb-8a3a-e5b3390bf5dd", "address": "fa:16:3e:6b:13:58", "network": {"id": "88f09773-de83-4126-80be-f9d85222a3aa", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1417499811-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9473d6611ded48cd9e9d10a6f0bf1a3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd15c8cc5-37", "ovs_interfaceid": "d15c8cc5-3728-42eb-8a3a-e5b3390bf5dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1954.632231] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1611d828-2734-489e-8523-6cefa26fafdc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.639721] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-683582df-1719-471e-972f-c957f767dabc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.677659] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b59a31-0d8c-47f9-ba70-3ad92942dcd3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.685948] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-720ebc33-a07e-47b4-aab9-b0ca46e29446 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.702134] env[62684]: DEBUG nova.compute.provider_tree [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1954.727513] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052923, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06087} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.728044] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1954.728662] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e10bfd-7dd3-4d45-a982-c1587698521f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.752732] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] dcb0a5b2-379e-44ff-a9b0-be615943c94e/dcb0a5b2-379e-44ff-a9b0-be615943c94e.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1954.752732] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b8d621d-b6f7-4a3c-8800-399aceaca8cd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.774707] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1954.774707] env[62684]: value = "task-2052925" [ 1954.774707] env[62684]: _type = "Task" [ 1954.774707] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.782701] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052925, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.793131] env[62684]: DEBUG oslo_vmware.api [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Task: {'id': task-2052924, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.439232} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.793131] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] a3c7943e-7528-41bc-9a20-1e2b57f832e3/a3c7943e-7528-41bc-9a20-1e2b57f832e3.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1954.793131] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1954.793131] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a9cced18-bd1d-4254-82f7-87a510ca66d2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.799318] env[62684]: DEBUG oslo_vmware.api [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Waiting for the task: (returnval){ [ 1954.799318] env[62684]: value = "task-2052926" [ 1954.799318] env[62684]: _type = "Task" [ 1954.799318] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.807012] env[62684]: DEBUG oslo_vmware.api [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Task: {'id': task-2052926, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.872925] env[62684]: DEBUG oslo_vmware.api [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052922, 'name': PowerOnVM_Task, 'duration_secs': 0.584194} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.873212] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1954.873419] env[62684]: INFO nova.compute.manager [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Took 8.80 seconds to spawn the instance on the hypervisor. 
[ 1954.873604] env[62684]: DEBUG nova.compute.manager [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1954.874433] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d921915d-0db8-496c-95cd-ba75d1f5fbfb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.006612] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Releasing lock "refresh_cache-31419285-9fdf-4d37-94d7-d1b08c6b6b05" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1955.007252] env[62684]: DEBUG nova.compute.manager [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Instance network_info: |[{"id": "d15c8cc5-3728-42eb-8a3a-e5b3390bf5dd", "address": "fa:16:3e:6b:13:58", "network": {"id": "88f09773-de83-4126-80be-f9d85222a3aa", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1417499811-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9473d6611ded48cd9e9d10a6f0bf1a3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd15c8cc5-37", "ovs_interfaceid": "d15c8cc5-3728-42eb-8a3a-e5b3390bf5dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1955.007635] env[62684]: DEBUG oslo_concurrency.lockutils [req-10607316-8453-4a6a-8bc6-5e1a4cf562ab req-2df224fc-70bd-4654-a8b6-bca2fd18129d service nova] Acquired lock "refresh_cache-31419285-9fdf-4d37-94d7-d1b08c6b6b05" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1955.007890] env[62684]: DEBUG nova.network.neutron [req-10607316-8453-4a6a-8bc6-5e1a4cf562ab req-2df224fc-70bd-4654-a8b6-bca2fd18129d service nova] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Refreshing network info cache for port d15c8cc5-3728-42eb-8a3a-e5b3390bf5dd {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1955.009232] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Instance VIF info [{'network_name': 'br-int', 
'mac_address': 'fa:16:3e:6b:13:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cae1d6a8-cbba-4bbf-af10-ba5467340475', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd15c8cc5-3728-42eb-8a3a-e5b3390bf5dd', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1955.018139] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Creating folder: Project (9473d6611ded48cd9e9d10a6f0bf1a3c). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1955.019288] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b656607d-6a41-4a05-83f1-a3c84b50ff3f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.032714] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Created folder: Project (9473d6611ded48cd9e9d10a6f0bf1a3c) in parent group-v421118. [ 1955.032913] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Creating folder: Instances. Parent ref: group-v421279. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1955.033210] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9335cc20-69c2-4a0f-8dd4-7fe52af556a0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.044039] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Created folder: Instances in parent group-v421279. [ 1955.044131] env[62684]: DEBUG oslo.service.loopingcall [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1955.044364] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1955.044906] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-42a1bc70-8a0c-4615-adbf-ab9aa07b1726 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.063131] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1955.063131] env[62684]: value = "task-2052929" [ 1955.063131] env[62684]: _type = "Task" [ 1955.063131] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.070318] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052929, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.071384] env[62684]: DEBUG nova.compute.manager [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1955.098229] env[62684]: DEBUG nova.virt.hardware [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1955.098491] env[62684]: DEBUG nova.virt.hardware [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1955.098684] env[62684]: DEBUG nova.virt.hardware [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1955.098901] env[62684]: DEBUG nova.virt.hardware [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1955.099081] env[62684]: DEBUG nova.virt.hardware [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1955.099241] env[62684]: DEBUG nova.virt.hardware [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1955.099456] env[62684]: DEBUG nova.virt.hardware [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1955.099623] 
env[62684]: DEBUG nova.virt.hardware [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1955.099838] env[62684]: DEBUG nova.virt.hardware [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1955.099954] env[62684]: DEBUG nova.virt.hardware [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1955.100147] env[62684]: DEBUG nova.virt.hardware [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1955.100985] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-185bc472-c6ec-41c6-ae69-43c5c23e5d3a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.108346] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4b8e508-8d8d-48b8-bd39-ea2b891adc47 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.121978] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Instance VIF info [] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1955.127427] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Creating folder: Project (0c5619d2ad7b495683cd2ce9b69bfa0c). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1955.127729] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7c8e2ad3-fb56-47fb-9ba9-9d7c18f50986 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.136473] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Created folder: Project (0c5619d2ad7b495683cd2ce9b69bfa0c) in parent group-v421118. [ 1955.137730] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Creating folder: Instances. Parent ref: group-v421282. 
{{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1955.137730] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2d171fd1-1a8f-444d-8f67-b1135a886c12 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.145828] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Created folder: Instances in parent group-v421282. [ 1955.146073] env[62684]: DEBUG oslo.service.loopingcall [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1955.146304] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1955.146521] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ee4cda39-190b-4c26-acb6-ff771088cf60 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.162458] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1955.162458] env[62684]: value = "task-2052932" [ 1955.162458] env[62684]: _type = "Task" [ 1955.162458] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.170013] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052932, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.205111] env[62684]: DEBUG nova.scheduler.client.report [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1955.285749] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052925, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.309711] env[62684]: DEBUG oslo_vmware.api [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Task: {'id': task-2052926, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071833} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.309989] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1955.310883] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceb46e94-ec09-4272-833e-15bdd2e00bd0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.338779] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] a3c7943e-7528-41bc-9a20-1e2b57f832e3/a3c7943e-7528-41bc-9a20-1e2b57f832e3.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1955.338779] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bbec5204-d68d-4208-b101-21d0acba6503 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.362110] env[62684]: DEBUG oslo_vmware.api [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Waiting for the task: (returnval){ [ 1955.362110] env[62684]: value = "task-2052933" [ 1955.362110] env[62684]: _type = "Task" [ 1955.362110] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.373935] env[62684]: DEBUG oslo_vmware.api [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Task: {'id': task-2052933, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.396542] env[62684]: INFO nova.compute.manager [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Took 61.24 seconds to build instance. 
[ 1955.434073] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Acquiring lock "548df581-073b-41d4-bcbe-df7342a2beca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1955.434073] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Lock "548df581-073b-41d4-bcbe-df7342a2beca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1955.573581] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052929, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.674567] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052932, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.713734] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.668s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1955.714489] env[62684]: DEBUG nova.compute.manager [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1955.720743] env[62684]: DEBUG oslo_concurrency.lockutils [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 38.971s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1955.785720] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052925, 'name': ReconfigVM_Task, 'duration_secs': 1.005973} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.786223] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Reconfigured VM instance instance-0000000f to attach disk [datastore1] dcb0a5b2-379e-44ff-a9b0-be615943c94e/dcb0a5b2-379e-44ff-a9b0-be615943c94e.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1955.786891] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6ab85178-42dd-4f70-a643-20b2562cd52f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.793983] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1955.793983] env[62684]: value = "task-2052934" [ 1955.793983] env[62684]: _type = "Task" [ 1955.793983] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.802566] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052934, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.845991] env[62684]: DEBUG nova.network.neutron [req-10607316-8453-4a6a-8bc6-5e1a4cf562ab req-2df224fc-70bd-4654-a8b6-bca2fd18129d service nova] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Updated VIF entry in instance network info cache for port d15c8cc5-3728-42eb-8a3a-e5b3390bf5dd. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1955.846418] env[62684]: DEBUG nova.network.neutron [req-10607316-8453-4a6a-8bc6-5e1a4cf562ab req-2df224fc-70bd-4654-a8b6-bca2fd18129d service nova] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Updating instance_info_cache with network_info: [{"id": "d15c8cc5-3728-42eb-8a3a-e5b3390bf5dd", "address": "fa:16:3e:6b:13:58", "network": {"id": "88f09773-de83-4126-80be-f9d85222a3aa", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1417499811-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9473d6611ded48cd9e9d10a6f0bf1a3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd15c8cc5-37", "ovs_interfaceid": "d15c8cc5-3728-42eb-8a3a-e5b3390bf5dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1955.873967] env[62684]: DEBUG oslo_vmware.api [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Task: {'id': task-2052933, 'name': ReconfigVM_Task, 'duration_secs': 0.45262} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.874306] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Reconfigured VM instance instance-00000037 to attach disk [datastore1] a3c7943e-7528-41bc-9a20-1e2b57f832e3/a3c7943e-7528-41bc-9a20-1e2b57f832e3.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1955.874950] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5ab7fb83-351b-4c64-80c7-ce058acb7292 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.884023] env[62684]: DEBUG oslo_vmware.api [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Waiting for the task: (returnval){ [ 1955.884023] env[62684]: value = "task-2052935" [ 1955.884023] env[62684]: _type = "Task" [ 1955.884023] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.892959] env[62684]: DEBUG oslo_vmware.api [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Task: {'id': task-2052935, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.899056] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b8c9b18-8ec0-4cd2-bc7f-440fc2bfcb3f tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "df93c57e-716c-4c73-b551-9079a523ea0b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.596s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1956.007365] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "b009f710-1a94-4113-8feb-7cc5dd6a6519" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1956.007365] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "b009f710-1a94-4113-8feb-7cc5dd6a6519" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1956.007365] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "b009f710-1a94-4113-8feb-7cc5dd6a6519-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1956.007365] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "b009f710-1a94-4113-8feb-7cc5dd6a6519-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1956.007365] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "b009f710-1a94-4113-8feb-7cc5dd6a6519-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1956.008617] env[62684]: INFO nova.compute.manager [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Terminating instance [ 1956.012417] 
env[62684]: DEBUG nova.compute.manager [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1956.012628] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1956.013982] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2867847-24d4-46db-9d96-2a30273f4255 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.024777] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1956.026997] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2f91f66b-d7f9-4735-8897-47034c4cea50 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.033039] env[62684]: DEBUG oslo_vmware.api [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1956.033039] env[62684]: value = "task-2052936" [ 1956.033039] env[62684]: _type = "Task" [ 1956.033039] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.042480] env[62684]: DEBUG oslo_vmware.api [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052936, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.076380] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052929, 'name': CreateVM_Task, 'duration_secs': 0.772764} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.076380] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1956.077546] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1956.077653] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1956.077928] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1956.078224] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1e4b245-0d9c-4893-8caf-516ca1e93504 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.080063] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "df93c57e-716c-4c73-b551-9079a523ea0b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1956.080283] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "df93c57e-716c-4c73-b551-9079a523ea0b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1956.080577] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "df93c57e-716c-4c73-b551-9079a523ea0b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1956.081144] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "df93c57e-716c-4c73-b551-9079a523ea0b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" 
:: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1956.081144] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "df93c57e-716c-4c73-b551-9079a523ea0b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1956.082872] env[62684]: INFO nova.compute.manager [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Terminating instance [ 1956.084891] env[62684]: DEBUG nova.compute.manager [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1956.085103] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1956.086252] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58062c19-198a-4c52-8522-7f4c5469c9a8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.093129] env[62684]: DEBUG oslo_vmware.api [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Waiting for the task: (returnval){ [ 1956.093129] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5271cdfe-ac79-8e08-13f8-bc0902e05fbe" [ 1956.093129] env[62684]: _type = "Task" [ 1956.093129] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.099353] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1956.099998] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d4de62f4-f010-4ff9-88df-e698c6b1b740 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.105763] env[62684]: DEBUG oslo_vmware.api [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5271cdfe-ac79-8e08-13f8-bc0902e05fbe, 'name': SearchDatastore_Task, 'duration_secs': 0.009218} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.106763] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1956.106763] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1956.107010] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1956.107186] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1956.107368] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1956.107623] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1ea7f85-52ed-47aa-ace2-7f61a547782c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.110561] env[62684]: DEBUG oslo_vmware.api [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1956.110561] env[62684]: value = "task-2052937" [ 1956.110561] env[62684]: _type = "Task" [ 1956.110561] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.117895] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1956.118154] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1956.124091] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3456078-136e-433b-8426-9e8b46956224 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.127684] env[62684]: DEBUG oslo_vmware.api [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052937, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.130869] env[62684]: DEBUG oslo_vmware.api [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Waiting for the task: (returnval){ [ 1956.130869] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a1f963-8b0c-f128-3072-4f3e379306e1" [ 1956.130869] env[62684]: _type = "Task" [ 1956.130869] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.139186] env[62684]: DEBUG oslo_vmware.api [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a1f963-8b0c-f128-3072-4f3e379306e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.177035] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052932, 'name': CreateVM_Task, 'duration_secs': 0.672774} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.177035] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1956.177035] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1956.177035] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1956.177035] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1956.177035] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ecc0298-e328-471e-8dfb-d8689e33eec5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.181705] env[62684]: DEBUG oslo_vmware.api [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Waiting for the task: (returnval){ [ 1956.181705] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526a4a56-384d-0c70-4cda-daf1a4f29d70" [ 1956.181705] env[62684]: _type = "Task" [ 1956.181705] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.192015] env[62684]: DEBUG oslo_vmware.api [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526a4a56-384d-0c70-4cda-daf1a4f29d70, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.225642] env[62684]: INFO nova.compute.claims [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1956.230988] env[62684]: DEBUG nova.compute.utils [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1956.232691] env[62684]: DEBUG nova.compute.manager [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1956.232857] env[62684]: DEBUG nova.network.neutron [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1956.303619] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052934, 'name': Rename_Task, 'duration_secs': 0.292288} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.303619] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1956.303802] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d72def27-bdba-4d4f-a5cb-1802a9a2bfcd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.309245] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1956.309245] env[62684]: value = "task-2052938" [ 1956.309245] env[62684]: _type = "Task" [ 1956.309245] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.319427] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052938, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.325039] env[62684]: DEBUG nova.policy [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6e8b54745b53458eafe4d911d7d6d7d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c54f74085f343d2b790145b0d82a9f8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1956.349270] env[62684]: DEBUG oslo_concurrency.lockutils [req-10607316-8453-4a6a-8bc6-5e1a4cf562ab req-2df224fc-70bd-4654-a8b6-bca2fd18129d service nova] Releasing lock "refresh_cache-31419285-9fdf-4d37-94d7-d1b08c6b6b05" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1956.393800] env[62684]: DEBUG oslo_vmware.api [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Task: {'id': task-2052935, 'name': Rename_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.401390] env[62684]: DEBUG nova.compute.manager [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1956.544368] env[62684]: DEBUG oslo_vmware.api [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052936, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.624214] env[62684]: DEBUG oslo_vmware.api [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052937, 'name': PowerOffVM_Task, 'duration_secs': 0.160732} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.625122] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1956.625122] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1956.625122] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fa03b4ba-2060-489e-b434-2bbc94fd33ac {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.644314] env[62684]: DEBUG oslo_vmware.api [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a1f963-8b0c-f128-3072-4f3e379306e1, 'name': SearchDatastore_Task, 'duration_secs': 0.008406} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.645127] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-781c72c1-c055-4e95-bb9a-a0ca67a6972a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.650674] env[62684]: DEBUG oslo_vmware.api [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Waiting for the task: (returnval){ [ 1956.650674] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523acdd2-d6a7-8fdf-7371-059ddf49501b" [ 1956.650674] env[62684]: _type = "Task" [ 1956.650674] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.662156] env[62684]: DEBUG oslo_vmware.api [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523acdd2-d6a7-8fdf-7371-059ddf49501b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.686802] env[62684]: DEBUG nova.network.neutron [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Successfully created port: bf2ef9b1-19b4-40eb-a403-401532281c03 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1956.694243] env[62684]: DEBUG oslo_vmware.api [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526a4a56-384d-0c70-4cda-daf1a4f29d70, 'name': SearchDatastore_Task, 'duration_secs': 0.008753} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.694573] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1956.694807] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1956.695029] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1956.726298] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1956.726563] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1956.726751] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Deleting the datastore file [datastore1] df93c57e-716c-4c73-b551-9079a523ea0b {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1956.727054] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d2947fb5-0f48-42c9-b2ce-491e98b7a970 {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.735500] env[62684]: INFO nova.compute.resource_tracker [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Updating resource usage from migration 079cb72a-143d-49f7-91bd-12a1ad5c9e3e [ 1956.738283] env[62684]: DEBUG oslo_vmware.api [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1956.738283] env[62684]: value = "task-2052940" [ 1956.738283] env[62684]: _type = "Task" [ 1956.738283] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.738926] env[62684]: DEBUG nova.compute.manager [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1956.753155] env[62684]: DEBUG oslo_vmware.api [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052940, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.821787] env[62684]: DEBUG oslo_vmware.api [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052938, 'name': PowerOnVM_Task, 'duration_secs': 0.45009} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.821787] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1956.822059] env[62684]: DEBUG nova.compute.manager [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1956.823360] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19574157-b22c-4734-bb23-52ffe6ef4c25 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.897046] env[62684]: DEBUG oslo_vmware.api [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Task: {'id': task-2052935, 'name': Rename_Task} progress is 99%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.921665] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1957.045593] env[62684]: DEBUG oslo_vmware.api [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052936, 'name': PowerOffVM_Task, 'duration_secs': 0.903055} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.045875] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1957.046075] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1957.046389] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b721edbe-1386-44b3-87ca-890003b56d77 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.137902] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1957.138164] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1957.138352] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Deleting the datastore file [datastore2] b009f710-1a94-4113-8feb-7cc5dd6a6519 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1957.141015] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e1fc016-3d44-49c4-9131-770ebc5b4cae {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.145145] env[62684]: DEBUG oslo_vmware.api [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for the task: (returnval){ [ 1957.145145] env[62684]: value = "task-2052942" [ 
1957.145145] env[62684]: _type = "Task" [ 1957.145145] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.156877] env[62684]: DEBUG oslo_vmware.api [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052942, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.162168] env[62684]: DEBUG oslo_vmware.api [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523acdd2-d6a7-8fdf-7371-059ddf49501b, 'name': SearchDatastore_Task, 'duration_secs': 0.025332} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.165132] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1957.165463] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 31419285-9fdf-4d37-94d7-d1b08c6b6b05/31419285-9fdf-4d37-94d7-d1b08c6b6b05.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1957.166713] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1957.166713] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1957.166713] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b82abdb1-963e-4858-af49-3e3e4db3ff75 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.168460] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c5225eca-69fe-42cd-b8ec-4b0e2588b8f7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.175385] env[62684]: DEBUG oslo_vmware.api [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Waiting for the 
task: (returnval){ [ 1957.175385] env[62684]: value = "task-2052943" [ 1957.175385] env[62684]: _type = "Task" [ 1957.175385] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.181838] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1957.182063] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1957.183293] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85ad9373-69da-4242-90c1-925ff2fc61c9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.191030] env[62684]: DEBUG oslo_vmware.api [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Task: {'id': task-2052943, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.193906] env[62684]: DEBUG oslo_vmware.api [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Waiting for the task: (returnval){ [ 1957.193906] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52535e79-91a3-6d52-5f4b-505e6574ca0a" [ 1957.193906] env[62684]: _type = "Task" [ 1957.193906] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.204019] env[62684]: DEBUG oslo_vmware.api [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52535e79-91a3-6d52-5f4b-505e6574ca0a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.220119] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b3fc33-cfc7-427c-8969-21bcd26a8252 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.228416] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-947e6eac-e44c-48a1-8c0c-ea33b03fad8b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.268578] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a5109bd-835d-43bf-848f-dd34baa9637e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.280854] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-843c242b-efb6-4310-b5f0-7cb02f9903b5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.285066] env[62684]: DEBUG oslo_vmware.api [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052940, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192405} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.285354] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1957.285597] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1957.285803] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1957.286008] env[62684]: INFO nova.compute.manager [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1957.286298] env[62684]: DEBUG oslo.service.loopingcall [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1957.286865] env[62684]: DEBUG nova.compute.manager [-] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1957.287011] env[62684]: DEBUG nova.network.neutron [-] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1957.296988] env[62684]: DEBUG nova.compute.provider_tree [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1957.328995] env[62684]: INFO nova.compute.manager [None req-4400f1a2-64cf-4f55-822e-2ab68c20487c tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Get console output [ 1957.329501] env[62684]: WARNING nova.virt.vmwareapi.driver [None req-4400f1a2-64cf-4f55-822e-2ab68c20487c tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] The console log is missing. Check your VSPC configuration [ 1957.340583] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1957.394218] env[62684]: DEBUG oslo_vmware.api [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Task: {'id': task-2052935, 'name': Rename_Task, 'duration_secs': 1.01864} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.394509] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1957.394761] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b3c8fdce-5edd-427c-8fa2-852dea7d59ce {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.401297] env[62684]: DEBUG oslo_vmware.api [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Waiting for the task: (returnval){ [ 1957.401297] env[62684]: value = "task-2052944" [ 1957.401297] env[62684]: _type = "Task" [ 1957.401297] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.413109] env[62684]: DEBUG oslo_vmware.api [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Task: {'id': task-2052944, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.658567] env[62684]: DEBUG oslo_vmware.api [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Task: {'id': task-2052942, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.180993} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.659392] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1957.659392] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1957.659392] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1957.659550] env[62684]: INFO nova.compute.manager [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1957.659769] env[62684]: DEBUG oslo.service.loopingcall [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1957.660060] env[62684]: DEBUG nova.compute.manager [-] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1957.660160] env[62684]: DEBUG nova.network.neutron [-] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1957.689831] env[62684]: DEBUG oslo_vmware.api [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Task: {'id': task-2052943, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.709412] env[62684]: DEBUG oslo_vmware.api [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52535e79-91a3-6d52-5f4b-505e6574ca0a, 'name': SearchDatastore_Task, 'duration_secs': 0.009516} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.709412] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fd68184-3828-45a2-9117-b26117152275 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.717319] env[62684]: DEBUG oslo_vmware.api [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Waiting for the task: (returnval){ [ 1957.717319] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520c325b-8123-d671-a287-d35b4d21a567" [ 1957.717319] env[62684]: _type = "Task" [ 1957.717319] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.728591] env[62684]: DEBUG oslo_vmware.api [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520c325b-8123-d671-a287-d35b4d21a567, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.774901] env[62684]: DEBUG nova.compute.manager [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1957.801363] env[62684]: DEBUG nova.virt.hardware [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1957.801644] env[62684]: DEBUG nova.virt.hardware [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1957.801809] env[62684]: DEBUG nova.virt.hardware [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1957.802017] env[62684]: DEBUG nova.virt.hardware [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1957.802185] env[62684]: DEBUG nova.virt.hardware [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1957.802341] env[62684]: DEBUG nova.virt.hardware [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1957.802553] env[62684]: DEBUG nova.virt.hardware [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1957.802718] env[62684]: DEBUG nova.virt.hardware [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1957.802890] env[62684]: DEBUG nova.virt.hardware [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 
tempest-ServersTestJSON-828328252-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1957.803078] env[62684]: DEBUG nova.virt.hardware [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1957.803268] env[62684]: DEBUG nova.virt.hardware [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1957.804155] env[62684]: DEBUG nova.scheduler.client.report [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1957.808105] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5778c88e-db4c-4fd0-b95f-3477a69b331d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.816320] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41b39189-fb67-41bf-94ae-017b63a39827 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.912038] env[62684]: DEBUG oslo_vmware.api [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Task: {'id': task-2052944, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.914421] env[62684]: DEBUG nova.compute.manager [req-7eaee993-68f1-4be2-9a7d-24927f673dda req-0cf2a6f0-9956-43ff-aa07-24f4d6465c92 service nova] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Received event network-vif-deleted-30f49652-7f4b-41c8-8bbd-33a9e6b81e7d {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1957.914654] env[62684]: INFO nova.compute.manager [req-7eaee993-68f1-4be2-9a7d-24927f673dda req-0cf2a6f0-9956-43ff-aa07-24f4d6465c92 service nova] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Neutron deleted interface 30f49652-7f4b-41c8-8bbd-33a9e6b81e7d; detaching it from the instance and deleting it from the info cache [ 1957.914864] env[62684]: DEBUG nova.network.neutron [req-7eaee993-68f1-4be2-9a7d-24927f673dda req-0cf2a6f0-9956-43ff-aa07-24f4d6465c92 service nova] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1958.047769] env[62684]: DEBUG nova.network.neutron [-] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1958.190023] env[62684]: DEBUG oslo_vmware.api [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Task: {'id': task-2052943, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.574992} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.190023] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 31419285-9fdf-4d37-94d7-d1b08c6b6b05/31419285-9fdf-4d37-94d7-d1b08c6b6b05.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1958.191024] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1958.191179] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a7de3420-bd1b-456e-970d-c2d0e8940424 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.200198] env[62684]: DEBUG oslo_vmware.api [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Waiting for the task: (returnval){ [ 1958.200198] env[62684]: value = "task-2052945" [ 1958.200198] env[62684]: _type = "Task" [ 1958.200198] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.208070] env[62684]: DEBUG oslo_vmware.api [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Task: {'id': task-2052945, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.228227] env[62684]: DEBUG oslo_vmware.api [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520c325b-8123-d671-a287-d35b4d21a567, 'name': SearchDatastore_Task, 'duration_secs': 0.021722} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.228740] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1958.229187] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 18a97088-fffa-4b77-8ab0-d24f6f84f516/18a97088-fffa-4b77-8ab0-d24f6f84f516.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1958.231037] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-675c236e-13ae-4b3d-9574-39752be8709a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.236570] env[62684]: DEBUG oslo_vmware.api [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Waiting for the task: (returnval){ [ 1958.236570] env[62684]: value = "task-2052946" [ 1958.236570] env[62684]: _type = "Task" [ 1958.236570] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.244901] env[62684]: DEBUG oslo_vmware.api [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Task: {'id': task-2052946, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.315028] env[62684]: DEBUG oslo_concurrency.lockutils [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.592s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1958.315028] env[62684]: INFO nova.compute.manager [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Migrating [ 1958.319965] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.558s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1958.321858] env[62684]: INFO nova.compute.claims [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1958.393468] env[62684]: DEBUG nova.network.neutron [-] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1958.414266] env[62684]: DEBUG oslo_vmware.api [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Task: {'id': task-2052944, 'name': PowerOnVM_Task, 'duration_secs': 0.618372} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.415455] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1958.416127] env[62684]: INFO nova.compute.manager [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Took 8.49 seconds to spawn the instance on the hypervisor. 
[ 1958.416397] env[62684]: DEBUG nova.compute.manager [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1958.418128] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27c66ec8-6cde-43ae-a8a7-e44c54f010f5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.422170] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dd5da313-ab35-4592-968a-c8f35c50df88 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.445725] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-809524bb-3387-46ce-baf5-7bcde20704f1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.493768] env[62684]: DEBUG nova.compute.manager [req-7eaee993-68f1-4be2-9a7d-24927f673dda req-0cf2a6f0-9956-43ff-aa07-24f4d6465c92 service nova] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Detach interface failed, port_id=30f49652-7f4b-41c8-8bbd-33a9e6b81e7d, reason: Instance df93c57e-716c-4c73-b551-9079a523ea0b could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1958.522579] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Acquiring lock "effc673a-103f-413b-88ac-6907ad1ee852" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1958.525418] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Lock "effc673a-103f-413b-88ac-6907ad1ee852" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1958.525418] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Acquiring lock "effc673a-103f-413b-88ac-6907ad1ee852-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1958.525418] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Lock "effc673a-103f-413b-88ac-6907ad1ee852-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1958.525418] env[62684]: DEBUG oslo_concurrency.lockutils [None 
req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Lock "effc673a-103f-413b-88ac-6907ad1ee852-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1958.528192] env[62684]: INFO nova.compute.manager [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Terminating instance [ 1958.529837] env[62684]: DEBUG nova.compute.manager [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1958.530391] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1958.531407] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d052cc01-9da9-4b41-b9c2-45d28bf55c0e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.543452] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1958.543452] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c6f84e03-40bc-451c-a2eb-e992f274e116 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.548988] env[62684]: INFO nova.compute.manager [-] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Took 1.26 seconds to deallocate network for instance. [ 1958.549654] env[62684]: DEBUG oslo_vmware.api [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Waiting for the task: (returnval){ [ 1958.549654] env[62684]: value = "task-2052947" [ 1958.549654] env[62684]: _type = "Task" [ 1958.549654] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.565022] env[62684]: DEBUG oslo_vmware.api [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2052947, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.708278] env[62684]: DEBUG oslo_vmware.api [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Task: {'id': task-2052945, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068276} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.708625] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1958.709661] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4f770c-d976-4762-ae28-2f862ab2091c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.731303] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 31419285-9fdf-4d37-94d7-d1b08c6b6b05/31419285-9fdf-4d37-94d7-d1b08c6b6b05.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1958.732025] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f58c69c-b22f-47b3-bcad-7c69eeaa675a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.755028] env[62684]: DEBUG oslo_vmware.api [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Task: {'id': task-2052946, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.47187} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.756288] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 18a97088-fffa-4b77-8ab0-d24f6f84f516/18a97088-fffa-4b77-8ab0-d24f6f84f516.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1958.756517] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1958.756832] env[62684]: DEBUG oslo_vmware.api [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Waiting for the task: (returnval){ [ 1958.756832] env[62684]: value = "task-2052948" [ 1958.756832] env[62684]: _type = "Task" [ 1958.756832] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.757036] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0bfc15e5-c00b-401a-b1b1-03830b39d916 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.767483] env[62684]: DEBUG oslo_vmware.api [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Task: {'id': task-2052948, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.768755] env[62684]: DEBUG oslo_vmware.api [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Waiting for the task: (returnval){ [ 1958.768755] env[62684]: value = "task-2052949" [ 1958.768755] env[62684]: _type = "Task" [ 1958.768755] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.776666] env[62684]: DEBUG oslo_vmware.api [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Task: {'id': task-2052949, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.833174] env[62684]: DEBUG oslo_concurrency.lockutils [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "refresh_cache-02dc8c41-5092-4f84-9722-37d4df3a459a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1958.833174] env[62684]: DEBUG oslo_concurrency.lockutils [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquired lock "refresh_cache-02dc8c41-5092-4f84-9722-37d4df3a459a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1958.833174] env[62684]: DEBUG nova.network.neutron [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1958.863329] env[62684]: DEBUG nova.network.neutron [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Successfully updated port: bf2ef9b1-19b4-40eb-a403-401532281c03 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1958.896331] env[62684]: INFO nova.compute.manager [-] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Took 1.24 seconds to deallocate network for instance. [ 1958.912224] env[62684]: DEBUG nova.compute.manager [req-c32f0ec4-0cfe-40d3-9d80-e31f8f898365 req-6cf9b392-6075-4af1-9282-6158501beda5 service nova] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Received event network-vif-plugged-bf2ef9b1-19b4-40eb-a403-401532281c03 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1958.912224] env[62684]: DEBUG oslo_concurrency.lockutils [req-c32f0ec4-0cfe-40d3-9d80-e31f8f898365 req-6cf9b392-6075-4af1-9282-6158501beda5 service nova] Acquiring lock "a56a3fab-e491-44f5-9cf4-2c308138ffc4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1958.912545] env[62684]: DEBUG oslo_concurrency.lockutils [req-c32f0ec4-0cfe-40d3-9d80-e31f8f898365 req-6cf9b392-6075-4af1-9282-6158501beda5 service nova] Lock "a56a3fab-e491-44f5-9cf4-2c308138ffc4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1958.912545] env[62684]: DEBUG oslo_concurrency.lockutils [req-c32f0ec4-0cfe-40d3-9d80-e31f8f898365 req-6cf9b392-6075-4af1-9282-6158501beda5 service nova] Lock "a56a3fab-e491-44f5-9cf4-2c308138ffc4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1958.912655] env[62684]: DEBUG nova.compute.manager [req-c32f0ec4-0cfe-40d3-9d80-e31f8f898365 req-6cf9b392-6075-4af1-9282-6158501beda5 service nova] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] No waiting events found dispatching 
network-vif-plugged-bf2ef9b1-19b4-40eb-a403-401532281c03 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1958.914026] env[62684]: WARNING nova.compute.manager [req-c32f0ec4-0cfe-40d3-9d80-e31f8f898365 req-6cf9b392-6075-4af1-9282-6158501beda5 service nova] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Received unexpected event network-vif-plugged-bf2ef9b1-19b4-40eb-a403-401532281c03 for instance with vm_state building and task_state spawning. [ 1958.951590] env[62684]: INFO nova.compute.manager [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Took 63.28 seconds to build instance. [ 1959.062711] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1959.063307] env[62684]: DEBUG oslo_vmware.api [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2052947, 'name': PowerOffVM_Task, 'duration_secs': 0.353402} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.063634] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1959.063874] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1959.064286] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9407f97d-b9fb-4c22-a612-88b9eeff3b3f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.166015] env[62684]: DEBUG oslo_concurrency.lockutils [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "e3dd1bc0-f292-4ac7-a8db-324887a18411" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1959.166732] env[62684]: DEBUG oslo_concurrency.lockutils [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "e3dd1bc0-f292-4ac7-a8db-324887a18411" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1959.166732] env[62684]: DEBUG oslo_concurrency.lockutils [None 
req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "e3dd1bc0-f292-4ac7-a8db-324887a18411-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1959.167185] env[62684]: DEBUG oslo_concurrency.lockutils [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "e3dd1bc0-f292-4ac7-a8db-324887a18411-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1959.167185] env[62684]: DEBUG oslo_concurrency.lockutils [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "e3dd1bc0-f292-4ac7-a8db-324887a18411-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1959.173028] env[62684]: INFO nova.compute.manager [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Terminating instance [ 1959.175313] env[62684]: DEBUG nova.compute.manager [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1959.175523] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1959.176401] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ede538b5-b720-46fc-bc4c-426e6b3abce0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.184966] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1959.185973] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4a5e8a92-1d15-4155-ae77-727351af9541 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.193465] env[62684]: DEBUG oslo_vmware.api [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1959.193465] env[62684]: value = "task-2052951" [ 1959.193465] env[62684]: _type = "Task" [ 1959.193465] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.202371] env[62684]: DEBUG oslo_vmware.api [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052951, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.269473] env[62684]: DEBUG oslo_vmware.api [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Task: {'id': task-2052948, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.281637] env[62684]: DEBUG oslo_vmware.api [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Task: {'id': task-2052949, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.111634} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.284606] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1959.284964] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1959.285188] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1959.285421] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Deleting the datastore file [datastore1] effc673a-103f-413b-88ac-6907ad1ee852 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1959.286251] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-088b2a14-4c8a-4cd1-9028-e3c39d1b86bf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.288882] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-010132ae-c074-44fc-906e-a91f3f36c240 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.313554] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Reconfiguring VM instance 
instance-00000039 to attach disk [datastore1] 18a97088-fffa-4b77-8ab0-d24f6f84f516/18a97088-fffa-4b77-8ab0-d24f6f84f516.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1959.315527] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9fb2fc3a-9476-47d6-b52c-32fdef025f7e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.331575] env[62684]: DEBUG oslo_vmware.api [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Waiting for the task: (returnval){ [ 1959.331575] env[62684]: value = "task-2052952" [ 1959.331575] env[62684]: _type = "Task" [ 1959.331575] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.349041] env[62684]: DEBUG oslo_vmware.api [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Waiting for the task: (returnval){ [ 1959.349041] env[62684]: value = "task-2052953" [ 1959.349041] env[62684]: _type = "Task" [ 1959.349041] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.356540] env[62684]: DEBUG oslo_vmware.api [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2052952, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.362936] env[62684]: DEBUG oslo_vmware.api [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Task: {'id': task-2052953, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.366636] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "refresh_cache-a56a3fab-e491-44f5-9cf4-2c308138ffc4" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1959.366835] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquired lock "refresh_cache-a56a3fab-e491-44f5-9cf4-2c308138ffc4" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1959.367087] env[62684]: DEBUG nova.network.neutron [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1959.403973] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1959.455744] env[62684]: DEBUG oslo_concurrency.lockutils [None req-375f6c9d-bdf1-46f0-a4a6-02f7ef883e4c tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Lock "a3c7943e-7528-41bc-9a20-1e2b57f832e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.401s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1959.663635] env[62684]: DEBUG oslo_concurrency.lockutils [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Acquiring lock "a3c7943e-7528-41bc-9a20-1e2b57f832e3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1959.663891] env[62684]: DEBUG oslo_concurrency.lockutils [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Lock "a3c7943e-7528-41bc-9a20-1e2b57f832e3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1959.665076] env[62684]: DEBUG oslo_concurrency.lockutils [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Acquiring lock "a3c7943e-7528-41bc-9a20-1e2b57f832e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1959.665372] env[62684]: DEBUG oslo_concurrency.lockutils [None 
req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Lock "a3c7943e-7528-41bc-9a20-1e2b57f832e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1959.665516] env[62684]: DEBUG oslo_concurrency.lockutils [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Lock "a3c7943e-7528-41bc-9a20-1e2b57f832e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1959.667529] env[62684]: INFO nova.compute.manager [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Terminating instance [ 1959.669583] env[62684]: DEBUG nova.compute.manager [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1959.669786] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1959.670626] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04846bbc-a39d-4698-a1e1-62a1f77ba966 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.681256] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1959.681573] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eaf979fb-5c8c-4e7f-8b8c-f5324c3dfc4c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.689571] env[62684]: DEBUG oslo_vmware.api [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Waiting for the task: (returnval){ [ 1959.689571] env[62684]: value = "task-2052954" [ 1959.689571] env[62684]: _type = "Task" [ 1959.689571] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.708583] env[62684]: DEBUG oslo_vmware.api [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Task: {'id': task-2052954, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.711809] env[62684]: DEBUG oslo_vmware.api [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052951, 'name': PowerOffVM_Task, 'duration_secs': 0.252194} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.714829] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1959.715027] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1959.715511] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dbf1977f-e128-41ae-bc69-3f9b9b862599 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.730150] env[62684]: DEBUG nova.network.neutron [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Updating instance_info_cache with network_info: [{"id": "39c750a6-1076-4354-bc30-d7f50ca821b5", "address": "fa:16:3e:16:fe:89", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.34", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39c750a6-10", "ovs_interfaceid": "39c750a6-1076-4354-bc30-d7f50ca821b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1959.779788] env[62684]: DEBUG oslo_vmware.api [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 
tempest-ServersTestFqdnHostnames-1398608514-project-member] Task: {'id': task-2052948, 'name': ReconfigVM_Task, 'duration_secs': 0.799691} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.780237] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 31419285-9fdf-4d37-94d7-d1b08c6b6b05/31419285-9fdf-4d37-94d7-d1b08c6b6b05.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1959.781130] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2c7ffc94-af4d-408f-bf75-8f84a1f101ae {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.789638] env[62684]: DEBUG oslo_vmware.api [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Waiting for the task: (returnval){ [ 1959.789638] env[62684]: value = "task-2052956" [ 1959.789638] env[62684]: _type = "Task" [ 1959.789638] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.802343] env[62684]: DEBUG oslo_vmware.api [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Task: {'id': task-2052956, 'name': Rename_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.839202] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1959.839467] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1959.839663] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Deleting the datastore file [datastore1] e3dd1bc0-f292-4ac7-a8db-324887a18411 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1959.842830] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1813f9d9-1975-4cdd-bdbe-d44b4b2d6852 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.845125] env[62684]: DEBUG oslo_vmware.api [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2052952, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.22217} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.846335] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1959.846546] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1959.846730] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1959.846934] env[62684]: INFO nova.compute.manager [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Took 1.32 seconds to destroy the instance on the hypervisor. 
[ 1959.847217] env[62684]: DEBUG oslo.service.loopingcall [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1959.847961] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8aa65a6-f6ff-40a1-9b90-87a0f2a3acf8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.850969] env[62684]: DEBUG nova.compute.manager [-] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1959.851090] env[62684]: DEBUG nova.network.neutron [-] [instance: effc673a-103f-413b-88ac-6907ad1ee852] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1959.858548] env[62684]: DEBUG oslo_vmware.api [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1959.858548] env[62684]: value = "task-2052957" [ 1959.858548] env[62684]: _type = "Task" [ 1959.858548] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.865039] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8dd4f5-bcb2-43ab-8364-4287197ab3c9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.874073] env[62684]: DEBUG oslo_vmware.api [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Task: {'id': task-2052953, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.880387] env[62684]: DEBUG oslo_vmware.api [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052957, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.912940] env[62684]: DEBUG nova.network.neutron [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1959.915647] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27718095-d978-44b9-adc2-dc7380c07a2a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.925288] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e21d3899-e87f-4db2-9e26-f5011a5faae1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.940498] env[62684]: DEBUG nova.compute.provider_tree [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1959.959362] env[62684]: DEBUG nova.compute.manager [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1959.987219] env[62684]: DEBUG nova.compute.manager [req-db61da8f-8fbb-4baf-bdf0-ec8519a3c8e2 req-406b1367-fe7e-46a3-a5f9-5c53c042c12b service nova] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Received event network-vif-deleted-56087696-2064-4dae-a727-8e8e32b7bb65 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1960.071538] env[62684]: DEBUG nova.network.neutron [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Updating instance_info_cache with network_info: [{"id": "bf2ef9b1-19b4-40eb-a403-401532281c03", "address": "fa:16:3e:23:cd:97", "network": {"id": "aa52badb-0b73-48bc-afaa-5e06a97d5c7d", "bridge": "br-int", "label": "tempest-ServersTestJSON-556342067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c54f74085f343d2b790145b0d82a9f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf2ef9b1-19", "ovs_interfaceid": "bf2ef9b1-19b4-40eb-a403-401532281c03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1960.201572] env[62684]: DEBUG oslo_vmware.api [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Task: {'id': task-2052954, 'name': PowerOffVM_Task, 
'duration_secs': 0.222544} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1960.202057] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1960.202057] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1960.202310] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0a924e31-15f7-4d97-9ee0-9435e0227837 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.234062] env[62684]: DEBUG oslo_concurrency.lockutils [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Releasing lock "refresh_cache-02dc8c41-5092-4f84-9722-37d4df3a459a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1960.300281] env[62684]: DEBUG oslo_vmware.api [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Task: {'id': task-2052956, 'name': Rename_Task, 'duration_secs': 0.166511} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1960.300598] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1960.300857] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fbe73901-5e4e-46a0-bf8a-4472fd6853da {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.307619] env[62684]: DEBUG oslo_vmware.api [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Waiting for the task: (returnval){ [ 1960.307619] env[62684]: value = "task-2052959" [ 1960.307619] env[62684]: _type = "Task" [ 1960.307619] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1960.316421] env[62684]: DEBUG oslo_vmware.api [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Task: {'id': task-2052959, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.360299] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1960.360299] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1960.360299] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Deleting the datastore file [datastore1] a3c7943e-7528-41bc-9a20-1e2b57f832e3 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1960.360299] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-80204eb4-5857-4f33-9c1c-46299a7c1b90 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.365846] env[62684]: DEBUG oslo_vmware.api [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Task: {'id': task-2052953, 'name': ReconfigVM_Task, 'duration_secs': 0.53759} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1960.369719] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Reconfigured VM instance instance-00000039 to attach disk [datastore1] 18a97088-fffa-4b77-8ab0-d24f6f84f516/18a97088-fffa-4b77-8ab0-d24f6f84f516.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1960.370784] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cd61b14a-05e5-49db-b909-e004cb139431 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.376042] env[62684]: DEBUG oslo_vmware.api [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Waiting for the task: (returnval){ [ 1960.376042] env[62684]: value = "task-2052960" [ 1960.376042] env[62684]: _type = "Task" [ 1960.376042] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1960.379718] env[62684]: DEBUG oslo_vmware.api [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2052957, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145058} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1960.385658] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1960.385969] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1960.386299] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1960.386596] env[62684]: INFO nova.compute.manager [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1960.386980] env[62684]: DEBUG oslo.service.loopingcall [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1960.387407] env[62684]: DEBUG oslo_vmware.api [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Waiting for the task: (returnval){ [ 1960.387407] env[62684]: value = "task-2052961" [ 1960.387407] env[62684]: _type = "Task" [ 1960.387407] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1960.388268] env[62684]: DEBUG nova.compute.manager [-] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1960.388433] env[62684]: DEBUG nova.network.neutron [-] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1960.402093] env[62684]: DEBUG oslo_vmware.api [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Task: {'id': task-2052960, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.409560] env[62684]: DEBUG oslo_vmware.api [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Task: {'id': task-2052961, 'name': Rename_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.444288] env[62684]: DEBUG nova.scheduler.client.report [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1960.487654] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1960.574885] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Releasing lock "refresh_cache-a56a3fab-e491-44f5-9cf4-2c308138ffc4" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1960.575701] env[62684]: DEBUG nova.compute.manager [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Instance network_info: |[{"id": "bf2ef9b1-19b4-40eb-a403-401532281c03", "address": "fa:16:3e:23:cd:97", "network": {"id": "aa52badb-0b73-48bc-afaa-5e06a97d5c7d", "bridge": "br-int", "label": "tempest-ServersTestJSON-556342067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c54f74085f343d2b790145b0d82a9f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf2ef9b1-19", "ovs_interfaceid": "bf2ef9b1-19b4-40eb-a403-401532281c03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1960.576092] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:cd:97', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'1fb81f98-6f5a-47ab-a512-27277591d064', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bf2ef9b1-19b4-40eb-a403-401532281c03', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1960.583911] env[62684]: DEBUG oslo.service.loopingcall [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1960.584238] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1960.584447] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-86d5f7f7-1fcb-4a92-aff0-b6c93a7cd8fa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.604918] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1960.604918] env[62684]: value = "task-2052962" [ 1960.604918] env[62684]: _type = "Task" [ 1960.604918] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1960.613198] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052962, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.818728] env[62684]: DEBUG oslo_vmware.api [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Task: {'id': task-2052959, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.889028] env[62684]: DEBUG oslo_vmware.api [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Task: {'id': task-2052960, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.430097} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1960.889546] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1960.889928] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1960.890289] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1960.893017] env[62684]: INFO nova.compute.manager [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1960.893017] env[62684]: DEBUG oslo.service.loopingcall [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1960.893017] env[62684]: DEBUG nova.compute.manager [-] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1960.893017] env[62684]: DEBUG nova.network.neutron [-] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1960.903871] env[62684]: DEBUG oslo_vmware.api [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Task: {'id': task-2052961, 'name': Rename_Task, 'duration_secs': 0.317187} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1960.906017] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1960.906017] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-407e4544-66ef-4c09-9a57-e700c0ad18d2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.911492] env[62684]: DEBUG oslo_vmware.api [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Waiting for the task: (returnval){ [ 1960.911492] env[62684]: value = "task-2052963" [ 1960.911492] env[62684]: _type = "Task" [ 1960.911492] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1960.921236] env[62684]: DEBUG oslo_vmware.api [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Task: {'id': task-2052963, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.953035] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.633s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1960.953324] env[62684]: DEBUG nova.compute.manager [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1960.956680] env[62684]: DEBUG oslo_concurrency.lockutils [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.337s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1960.957021] env[62684]: DEBUG oslo_concurrency.lockutils [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1960.960310] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.140s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1960.961844] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1960.963538] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.053s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1960.963789] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1960.965637] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.844s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1960.967400] env[62684]: INFO nova.compute.claims [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1960.994411] env[62684]: INFO nova.scheduler.client.report [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 
tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Deleted allocations for instance c6dc5401-f59e-4c18-9553-1240e2f49bce [ 1961.020635] env[62684]: INFO nova.scheduler.client.report [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Deleted allocations for instance 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7 [ 1961.105995] env[62684]: DEBUG nova.compute.manager [req-a1632476-a229-4288-b9a4-316ca6402667 req-ed3c4e4a-2762-4549-9a3b-c66e0049cb0a service nova] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Received event network-changed-bf2ef9b1-19b4-40eb-a403-401532281c03 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1961.106276] env[62684]: DEBUG nova.compute.manager [req-a1632476-a229-4288-b9a4-316ca6402667 req-ed3c4e4a-2762-4549-9a3b-c66e0049cb0a service nova] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Refreshing instance network info cache due to event network-changed-bf2ef9b1-19b4-40eb-a403-401532281c03. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1961.106659] env[62684]: DEBUG oslo_concurrency.lockutils [req-a1632476-a229-4288-b9a4-316ca6402667 req-ed3c4e4a-2762-4549-9a3b-c66e0049cb0a service nova] Acquiring lock "refresh_cache-a56a3fab-e491-44f5-9cf4-2c308138ffc4" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1961.106798] env[62684]: DEBUG oslo_concurrency.lockutils [req-a1632476-a229-4288-b9a4-316ca6402667 req-ed3c4e4a-2762-4549-9a3b-c66e0049cb0a service nova] Acquired lock "refresh_cache-a56a3fab-e491-44f5-9cf4-2c308138ffc4" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1961.106977] env[62684]: DEBUG nova.network.neutron [req-a1632476-a229-4288-b9a4-316ca6402667 req-ed3c4e4a-2762-4549-9a3b-c66e0049cb0a service nova] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Refreshing network info cache for port bf2ef9b1-19b4-40eb-a403-401532281c03 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1961.120764] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052962, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.163270] env[62684]: DEBUG nova.network.neutron [-] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1961.165018] env[62684]: DEBUG nova.network.neutron [-] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1961.318296] env[62684]: DEBUG oslo_vmware.api [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Task: {'id': task-2052959, 'name': PowerOnVM_Task, 'duration_secs': 0.723773} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1961.318664] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1961.318800] env[62684]: INFO nova.compute.manager [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Took 8.81 seconds to spawn the instance on the hypervisor. [ 1961.318983] env[62684]: DEBUG nova.compute.manager [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1961.319772] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036990a9-83a0-49ed-89f7-5d36df408ee7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.421650] env[62684]: DEBUG oslo_vmware.api [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Task: {'id': task-2052963, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.475999] env[62684]: DEBUG nova.compute.utils [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1961.479031] env[62684]: DEBUG nova.compute.manager [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1961.479031] env[62684]: DEBUG nova.network.neutron [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1961.480717] env[62684]: DEBUG oslo_concurrency.lockutils [None req-86f161bf-413a-472d-ad1f-4d0d0cbc2e66 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 63.312s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1961.481625] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 39.662s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1961.481917] env[62684]: INFO nova.compute.manager [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Unshelving [ 1961.504420] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f821e8cc-c310-4031-9e00-35b428ac52a2 tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "c6dc5401-f59e-4c18-9553-1240e2f49bce" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.772s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1961.530151] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d05225cf-8810-46b4-a8a7-e7b613f7f71f tempest-ListServersNegativeTestJSON-942603561 tempest-ListServersNegativeTestJSON-942603561-project-member] Lock "5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.625s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1961.561820] env[62684]: DEBUG nova.policy [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b7b99d78251448edaf1d119509f6dedf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'feab568b5c9e41bfa2ca824d44bcc4e7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1961.620631] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052962, 'name': CreateVM_Task, 'duration_secs': 0.595987} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1961.620818] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1961.621610] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1961.621879] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1961.622276] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1961.622736] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d239e518-fbd8-4d78-9bc1-ab59c32fbef8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.627533] env[62684]: DEBUG oslo_vmware.api [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 1961.627533] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5283dabc-1a3e-4199-2fb6-faf017ceb2df" [ 1961.627533] env[62684]: _type = "Task" [ 1961.627533] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1961.636029] env[62684]: DEBUG oslo_vmware.api [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5283dabc-1a3e-4199-2fb6-faf017ceb2df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.666852] env[62684]: INFO nova.compute.manager [-] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Took 1.28 seconds to deallocate network for instance. [ 1961.667791] env[62684]: INFO nova.compute.manager [-] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Took 1.82 seconds to deallocate network for instance. 
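The SearchDatastore_Task and CreateVM_Task entries above come from oslo.vmware's task loop: the driver submits a vSphere task, then session.wait_for_task() polls its state, emitting the "progress is N%" lines until the task reports success (or raising on error). The sketch below shows only that polling pattern; it is a simplified illustration, not the oslo.vmware implementation, and get_task_info is a hypothetical stand-in for the PropertyCollector reads the real loop performs on each iteration.

import logging
import time

LOG = logging.getLogger(__name__)


def poll_vmware_task(get_task_info, task_id, interval=0.5):
    # get_task_info(task_id) is assumed to return a dict such as
    # {"state": "running", "progress": 88} built from the task's TaskInfo.
    # vSphere task states are queued, running, success, error.
    while True:
        info = get_task_info(task_id)
        state = info["state"]
        if state in ("queued", "running"):
            LOG.debug("Task: {'id': %s} progress is %s%%.",
                      task_id, info.get("progress", 0))
            time.sleep(interval)
        elif state == "success":
            LOG.debug("Task: {'id': %s} completed successfully.", task_id)
            return info.get("result")
        else:
            raise RuntimeError("Task %s error: %s" % (task_id, info.get("error")))

The 'duration_secs' values the log records (e.g. 0.595987 for CreateVM_Task) are simply the elapsed wall-clock time between task submission and the poll that saw the success state.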
[ 1961.755068] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a7bd8b-3f63-4b54-8a14-8086fb358ecd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.763551] env[62684]: DEBUG nova.network.neutron [-] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1961.780508] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Updating instance '02dc8c41-5092-4f84-9722-37d4df3a459a' progress to 0 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1961.783872] env[62684]: INFO nova.compute.manager [-] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Took 0.89 seconds to deallocate network for instance. [ 1961.843011] env[62684]: INFO nova.compute.manager [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Took 56.87 seconds to build instance. [ 1961.866557] env[62684]: DEBUG nova.network.neutron [req-a1632476-a229-4288-b9a4-316ca6402667 req-ed3c4e4a-2762-4549-9a3b-c66e0049cb0a service nova] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Updated VIF entry in instance network info cache for port bf2ef9b1-19b4-40eb-a403-401532281c03. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1961.866920] env[62684]: DEBUG nova.network.neutron [req-a1632476-a229-4288-b9a4-316ca6402667 req-ed3c4e4a-2762-4549-9a3b-c66e0049cb0a service nova] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Updating instance_info_cache with network_info: [{"id": "bf2ef9b1-19b4-40eb-a403-401532281c03", "address": "fa:16:3e:23:cd:97", "network": {"id": "aa52badb-0b73-48bc-afaa-5e06a97d5c7d", "bridge": "br-int", "label": "tempest-ServersTestJSON-556342067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c54f74085f343d2b790145b0d82a9f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf2ef9b1-19", "ovs_interfaceid": "bf2ef9b1-19b4-40eb-a403-401532281c03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1961.923210] env[62684]: DEBUG oslo_vmware.api [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Task: {'id': task-2052963, 'name': PowerOnVM_Task, 'duration_secs': 
0.756872} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1961.923533] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1961.923820] env[62684]: INFO nova.compute.manager [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Took 6.85 seconds to spawn the instance on the hypervisor. [ 1961.924045] env[62684]: DEBUG nova.compute.manager [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1961.925221] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ca1d12-6efc-46cc-860c-ae26c3c95537 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.982679] env[62684]: DEBUG nova.compute.manager [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1962.055832] env[62684]: DEBUG nova.compute.manager [req-d84fc413-acd5-4fe4-82c0-1a1d13d40781 req-27c73094-7331-4ee9-b315-2d2903fa3e25 service nova] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Received event network-vif-deleted-c0c87a54-475a-48ca-96cc-988f06008d07 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1962.139169] env[62684]: DEBUG oslo_vmware.api [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5283dabc-1a3e-4199-2fb6-faf017ceb2df, 'name': SearchDatastore_Task, 'duration_secs': 0.010461} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1962.139508] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1962.139772] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1962.140038] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1962.140202] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1962.140391] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1962.140657] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d78391a7-d43c-4f02-ac05-39a9f080ffe4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.151920] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1962.152128] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1962.152849] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-166b8a7d-4d28-49de-81ee-84b3736aad18 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.161409] env[62684]: DEBUG oslo_vmware.api [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 1962.161409] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d42d4d-d8c5-e976-11d9-ef0c052b2015" [ 1962.161409] env[62684]: _type = "Task" [ 1962.161409] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1962.174096] env[62684]: DEBUG oslo_vmware.api [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d42d4d-d8c5-e976-11d9-ef0c052b2015, 'name': SearchDatastore_Task, 'duration_secs': 0.009664} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1962.178298] env[62684]: DEBUG oslo_concurrency.lockutils [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1962.178546] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e0f0897-1912-4d00-b8fe-c9548109d5fd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.181630] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1962.185110] env[62684]: DEBUG oslo_vmware.api [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 1962.185110] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52233ecc-b358-fd06-1a4d-8a3f18792a05" [ 1962.185110] env[62684]: _type = "Task" [ 1962.185110] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1962.195763] env[62684]: DEBUG oslo_vmware.api [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52233ecc-b358-fd06-1a4d-8a3f18792a05, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.226390] env[62684]: DEBUG nova.network.neutron [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Successfully created port: 6da08bf6-6c5b-41a3-90e2-d17b27a734e4 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1962.291225] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1962.291865] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-01fd43ac-c64d-4f21-bc69-135add37ec34 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.294038] env[62684]: DEBUG oslo_concurrency.lockutils [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1962.299854] env[62684]: DEBUG oslo_vmware.api [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 1962.299854] env[62684]: value = "task-2052964" [ 1962.299854] env[62684]: _type = "Task" [ 1962.299854] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1962.307974] env[62684]: DEBUG oslo_vmware.api [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052964, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.346025] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e067916e-4631-4b1c-89ad-0d73560c33f2 tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Lock "31419285-9fdf-4d37-94d7-d1b08c6b6b05" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.873s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1962.369573] env[62684]: DEBUG oslo_concurrency.lockutils [req-a1632476-a229-4288-b9a4-316ca6402667 req-ed3c4e4a-2762-4549-9a3b-c66e0049cb0a service nova] Releasing lock "refresh_cache-a56a3fab-e491-44f5-9cf4-2c308138ffc4" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1962.369898] env[62684]: DEBUG nova.compute.manager [req-a1632476-a229-4288-b9a4-316ca6402667 req-ed3c4e4a-2762-4549-9a3b-c66e0049cb0a service nova] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Received event network-vif-deleted-b9e25052-ec41-470d-b549-89e542cb4366 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1962.370113] env[62684]: INFO nova.compute.manager [req-a1632476-a229-4288-b9a4-316ca6402667 req-ed3c4e4a-2762-4549-9a3b-c66e0049cb0a service nova] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Neutron deleted interface b9e25052-ec41-470d-b549-89e542cb4366; detaching it from the instance and deleting it from the info cache [ 1962.370301] env[62684]: DEBUG nova.network.neutron [req-a1632476-a229-4288-b9a4-316ca6402667 req-ed3c4e4a-2762-4549-9a3b-c66e0049cb0a service nova] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1962.447052] env[62684]: INFO nova.compute.manager [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Took 52.67 seconds to build instance. 
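The 'Acquiring lock ... / Lock ... acquired ... waited Ns / Lock ... "released" ... held Ns' triplets in this section are emitted by oslo.concurrency's lockutils wrapper, which times both the wait to obtain a named lock and the time spent holding it. Below is a minimal stdlib-only sketch of that accounting, assuming in-process (non-external) locks; it mirrors the log format but is not oslo's code.

import contextlib
import logging
import threading
import time

LOG = logging.getLogger(__name__)

_LOCKS = {}                      # lock name -> threading.Lock
_LOCKS_GUARD = threading.Lock()  # protects the registry itself


@contextlib.contextmanager
def timed_lock(name, owner):
    with _LOCKS_GUARD:
        lock = _LOCKS.setdefault(name, threading.Lock())
    LOG.debug('Acquiring lock "%s" by "%s"', name, owner)
    start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - start
    LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs', name, owner, waited)
    held_from = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_from
        LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs', name, owner, held)

Read the two numbers differently: "waited" measures contention (for example the 39.662s wait to unshelve b4cd871a-30ea-4b7a-98ad-00b8676dc2cd while the shelve path still held the instance lock), whereas a long "held" value such as the 69.873s around _locked_do_build_and_run_instance only means the whole build ran under the instance lock, not that the lock itself was contended.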
[ 1962.492989] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c63735a-05f7-41bd-be05-ec9c78fb28aa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.497427] env[62684]: DEBUG nova.compute.utils [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1962.505341] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5db70e2a-1049-4212-958a-eeb20aab9022 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.539918] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c044f5a5-7daf-400a-9719-79d29dd9ea78 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.548862] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25fa97fd-1ff2-4613-a6ec-2cd871c9e163 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.565703] env[62684]: DEBUG nova.compute.provider_tree [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1962.695714] env[62684]: DEBUG oslo_vmware.api [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52233ecc-b358-fd06-1a4d-8a3f18792a05, 'name': SearchDatastore_Task, 'duration_secs': 0.010279} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1962.695981] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1962.696288] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] a56a3fab-e491-44f5-9cf4-2c308138ffc4/a56a3fab-e491-44f5-9cf4-2c308138ffc4.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1962.696910] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-86c9b9e5-9c50-4090-bcae-063472bc73d4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.703198] env[62684]: DEBUG oslo_vmware.api [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 1962.703198] env[62684]: value = "task-2052965" [ 1962.703198] env[62684]: _type = "Task" [ 1962.703198] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1962.713206] env[62684]: DEBUG oslo_vmware.api [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052965, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.811718] env[62684]: DEBUG oslo_vmware.api [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052964, 'name': PowerOffVM_Task, 'duration_secs': 0.272739} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1962.812054] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1962.812212] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Updating instance '02dc8c41-5092-4f84-9722-37d4df3a459a' progress to 17 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1962.873649] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-81e7b0c9-661e-4f0b-8142-08d9ce7c6283 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.883382] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0cac9a6-07b5-4560-9b2e-f50c779d6ee1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.916087] env[62684]: DEBUG nova.compute.manager [req-a1632476-a229-4288-b9a4-316ca6402667 req-ed3c4e4a-2762-4549-9a3b-c66e0049cb0a service nova] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Detach interface failed, port_id=b9e25052-ec41-470d-b549-89e542cb4366, reason: Instance effc673a-103f-413b-88ac-6907ad1ee852 could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1962.950972] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a1e9d0dd-ccdf-42d2-911a-ff0e15df6926 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Lock "18a97088-fffa-4b77-8ab0-d24f6f84f516" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.159s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1962.998373] env[62684]: DEBUG nova.compute.manager [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1963.001790] env[62684]: INFO nova.virt.block_device [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Booting with volume 554d5299-0a48-44f8-bb8e-9328f519c7ee at /dev/sdb [ 1963.026921] env[62684]: DEBUG nova.virt.hardware [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1963.027186] env[62684]: DEBUG nova.virt.hardware [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1963.027426] env[62684]: DEBUG nova.virt.hardware [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1963.027634] env[62684]: DEBUG nova.virt.hardware [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1963.027815] env[62684]: DEBUG nova.virt.hardware [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1963.028100] env[62684]: DEBUG nova.virt.hardware [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1963.032161] env[62684]: DEBUG nova.virt.hardware [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1963.032414] env[62684]: DEBUG 
nova.virt.hardware [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1963.033361] env[62684]: DEBUG nova.virt.hardware [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1963.033361] env[62684]: DEBUG nova.virt.hardware [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1963.033361] env[62684]: DEBUG nova.virt.hardware [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1963.034059] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc707ba-2b4c-4430-956e-70364f20d726 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.047201] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5814a6e-b4b1-4ef4-b234-e0ce48bb90b7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.054201] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-50cb864d-e500-4617-9e64-0515b21814f6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.069178] env[62684]: DEBUG nova.scheduler.client.report [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1963.076931] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f22dbcc-2efe-4032-8c2a-075ba84fd4c1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.111286] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2c5b127d-5f44-4418-82ca-3c62431148a9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.121946] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-296ffb64-08bd-4d3c-b39b-b7e66cfedecc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.162713] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f639bd6-754d-401c-90ea-7179358562d5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.173224] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8491dfa7-ee58-434c-9549-4acb1b99cd2b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.189973] env[62684]: DEBUG nova.virt.block_device [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Updating existing volume attachment record: c29603be-b550-4dbd-9f0c-8437c64fb394 {{(pid=62684) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1963.215030] env[62684]: DEBUG oslo_vmware.api [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052965, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.303733] env[62684]: DEBUG nova.compute.manager [None req-3526d58c-c224-4ef2-a3e6-1692ba61aed7 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1963.304776] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bff2e4c-e8ae-439e-800b-0c34b67555e5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.318744] env[62684]: DEBUG nova.virt.hardware [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1963.319090] env[62684]: DEBUG nova.virt.hardware [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1963.319204] env[62684]: DEBUG nova.virt.hardware [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 1963.319378] env[62684]: DEBUG nova.virt.hardware [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1963.319656] env[62684]: DEBUG nova.virt.hardware [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1963.319719] env[62684]: DEBUG nova.virt.hardware [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1963.320120] env[62684]: DEBUG nova.virt.hardware [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1963.320120] env[62684]: DEBUG nova.virt.hardware [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1963.320480] env[62684]: DEBUG nova.virt.hardware [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1963.320685] env[62684]: DEBUG nova.virt.hardware [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1963.320895] env[62684]: DEBUG nova.virt.hardware [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1963.325908] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d522f3f-5276-4569-bae4-b66107a1b0d1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.341468] env[62684]: DEBUG oslo_vmware.api [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 1963.341468] env[62684]: value = "task-2052966" [ 1963.341468] env[62684]: _type = "Task" [ 1963.341468] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1963.351430] env[62684]: DEBUG oslo_vmware.api [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052966, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.579553] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.611s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1963.579553] env[62684]: DEBUG nova.compute.manager [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1963.585519] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 34.812s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1963.585830] env[62684]: DEBUG nova.objects.instance [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62684) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1963.663043] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Acquiring lock "18a97088-fffa-4b77-8ab0-d24f6f84f516" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1963.663565] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Lock "18a97088-fffa-4b77-8ab0-d24f6f84f516" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1963.664061] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Acquiring lock "18a97088-fffa-4b77-8ab0-d24f6f84f516-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1963.664592] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd 
tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Lock "18a97088-fffa-4b77-8ab0-d24f6f84f516-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1963.664906] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Lock "18a97088-fffa-4b77-8ab0-d24f6f84f516-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1963.667775] env[62684]: INFO nova.compute.manager [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Terminating instance [ 1963.673987] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Acquiring lock "refresh_cache-18a97088-fffa-4b77-8ab0-d24f6f84f516" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1963.673987] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Acquired lock "refresh_cache-18a97088-fffa-4b77-8ab0-d24f6f84f516" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1963.673987] env[62684]: DEBUG nova.network.neutron [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1963.679212] env[62684]: DEBUG nova.compute.manager [req-e8370c2f-81af-4510-8d71-797c10cc9d3b req-b63ca2a3-5480-492d-a362-0305a3f7f2e9 service nova] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Received event network-vif-deleted-31109cc7-c34c-42b7-8245-d5d2e71162ad {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1963.715936] env[62684]: DEBUG oslo_vmware.api [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052965, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.568865} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1963.716442] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] a56a3fab-e491-44f5-9cf4-2c308138ffc4/a56a3fab-e491-44f5-9cf4-2c308138ffc4.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1963.719115] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1963.719115] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-28c63176-186a-4541-a36e-efcb21447429 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.725804] env[62684]: DEBUG oslo_vmware.api [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 1963.725804] env[62684]: value = "task-2052970" [ 1963.725804] env[62684]: _type = "Task" [ 1963.725804] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1963.735749] env[62684]: DEBUG oslo_vmware.api [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052970, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.818999] env[62684]: INFO nova.compute.manager [None req-3526d58c-c224-4ef2-a3e6-1692ba61aed7 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] instance snapshotting [ 1963.819700] env[62684]: DEBUG nova.objects.instance [None req-3526d58c-c224-4ef2-a3e6-1692ba61aed7 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Lazy-loading 'flavor' on Instance uuid 18a97088-fffa-4b77-8ab0-d24f6f84f516 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1963.854279] env[62684]: DEBUG oslo_vmware.api [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052966, 'name': ReconfigVM_Task, 'duration_secs': 0.200578} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1963.855054] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Updating instance '02dc8c41-5092-4f84-9722-37d4df3a459a' progress to 33 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1964.092467] env[62684]: DEBUG nova.compute.utils [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1964.098218] env[62684]: DEBUG nova.compute.manager [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1964.098218] env[62684]: DEBUG nova.network.neutron [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1964.120842] env[62684]: DEBUG nova.compute.manager [req-8b580f9a-c447-4bd8-85fd-106a2161ad32 req-9f4b56f1-d05f-464d-9c10-866e4e4d9a81 service nova] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Received event network-changed-d15c8cc5-3728-42eb-8a3a-e5b3390bf5dd {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1964.121239] env[62684]: DEBUG nova.compute.manager [req-8b580f9a-c447-4bd8-85fd-106a2161ad32 req-9f4b56f1-d05f-464d-9c10-866e4e4d9a81 service nova] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Refreshing instance network info cache due to event network-changed-d15c8cc5-3728-42eb-8a3a-e5b3390bf5dd. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1964.121543] env[62684]: DEBUG oslo_concurrency.lockutils [req-8b580f9a-c447-4bd8-85fd-106a2161ad32 req-9f4b56f1-d05f-464d-9c10-866e4e4d9a81 service nova] Acquiring lock "refresh_cache-31419285-9fdf-4d37-94d7-d1b08c6b6b05" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1964.121736] env[62684]: DEBUG oslo_concurrency.lockutils [req-8b580f9a-c447-4bd8-85fd-106a2161ad32 req-9f4b56f1-d05f-464d-9c10-866e4e4d9a81 service nova] Acquired lock "refresh_cache-31419285-9fdf-4d37-94d7-d1b08c6b6b05" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1964.121945] env[62684]: DEBUG nova.network.neutron [req-8b580f9a-c447-4bd8-85fd-106a2161ad32 req-9f4b56f1-d05f-464d-9c10-866e4e4d9a81 service nova] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Refreshing network info cache for port d15c8cc5-3728-42eb-8a3a-e5b3390bf5dd {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1964.180138] env[62684]: DEBUG nova.policy [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e1553097f3b048caa76bff4e5ecfbf5f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '398aed99e10d457e9cadda3239b27831', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1964.208268] env[62684]: DEBUG nova.network.neutron [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1964.237900] env[62684]: DEBUG oslo_vmware.api [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052970, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066799} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1964.238466] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1964.239421] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc73e2b-8ce7-43c6-b57a-a18118e2246e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.266806] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] a56a3fab-e491-44f5-9cf4-2c308138ffc4/a56a3fab-e491-44f5-9cf4-2c308138ffc4.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1964.267210] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a7edb5b-170c-4212-b114-df4a653b6771 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.289703] env[62684]: DEBUG oslo_vmware.api [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 1964.289703] env[62684]: value = "task-2052971" [ 1964.289703] env[62684]: _type = "Task" [ 1964.289703] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.296082] env[62684]: DEBUG nova.network.neutron [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1964.301727] env[62684]: DEBUG oslo_vmware.api [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052971, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.330020] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ce786a-9dcd-4b5e-a3ac-b7b809b83a95 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.344803] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c5ef5c4-08d7-42ad-ac50-ec7c59b20d58 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.363891] env[62684]: DEBUG nova.virt.hardware [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:47:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='d1dcf74d-6a75-42cb-977e-e0fc87b2d673',id=39,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1732807329',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1964.363891] env[62684]: DEBUG nova.virt.hardware [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1964.363891] env[62684]: DEBUG nova.virt.hardware [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1964.363891] env[62684]: DEBUG nova.virt.hardware [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1964.363891] env[62684]: DEBUG nova.virt.hardware [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1964.363891] env[62684]: DEBUG nova.virt.hardware [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1964.368016] env[62684]: DEBUG nova.virt.hardware [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1964.368016] env[62684]: DEBUG nova.virt.hardware [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1964.368016] env[62684]: DEBUG nova.virt.hardware [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1964.368016] env[62684]: DEBUG nova.virt.hardware [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1964.368016] env[62684]: DEBUG nova.virt.hardware [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1964.373166] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Reconfiguring VM instance instance-00000030 to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1964.374024] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca3344d1-c587-4694-83c3-103cd55090ea {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.392129] env[62684]: DEBUG nova.network.neutron [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Successfully updated port: 6da08bf6-6c5b-41a3-90e2-d17b27a734e4 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1964.398308] env[62684]: DEBUG oslo_vmware.api [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 1964.398308] env[62684]: value = "task-2052972" [ 1964.398308] env[62684]: _type = "Task" [ 1964.398308] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.408200] env[62684]: DEBUG oslo_vmware.api [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052972, 'name': ReconfigVM_Task} progress is 6%. 
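The nova.virt.hardware block above ("Build topologies for 1 vcpu(s) 1:1:1 ... Got 1 possible topologies") enumerates every sockets x cores x threads factorisation of the flavor's vCPU count that fits under the effective limits (65536 each here, since neither flavor nor image constrains the topology), then sorts the results by preference. A simplified stand-in for the enumeration step (not Nova's actual implementation, which also handles preferred orderings and NUMA):

```python
# Simplified illustration of the "possible topologies" enumeration logged
# above; Nova's real logic lives in nova/virt/hardware.py and also applies
# preference sorting and NUMA constraints.
import collections

Topology = collections.namedtuple('Topology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topologies.append(Topology(sockets, cores, threads))
    return topologies

# The 1-vCPU flavors in this log yield exactly one topology: 1:1:1.
print(possible_topologies(1))  # [Topology(sockets=1, cores=1, threads=1)]
print(possible_topologies(4))  # 1x1x4, 1x2x2, 2x2x1, 4x1x1, ...
```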
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.572267] env[62684]: DEBUG nova.network.neutron [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Successfully created port: 4ee262da-da6d-457d-a523-6d14746195e0 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1964.601803] env[62684]: DEBUG nova.compute.manager [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1964.605717] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b3a377ed-7d17-4922-92e8-de2d063ddcee tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1964.608209] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.253s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1964.608209] env[62684]: DEBUG nova.objects.instance [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lazy-loading 'resources' on Instance uuid 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1964.800593] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Releasing lock "refresh_cache-18a97088-fffa-4b77-8ab0-d24f6f84f516" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1964.801138] env[62684]: DEBUG nova.compute.manager [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1964.801416] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1964.801776] env[62684]: DEBUG oslo_vmware.api [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052971, 'name': ReconfigVM_Task, 'duration_secs': 0.330166} completed successfully. 
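The lock bookkeeping above ("acquired ... waited 30.253s", "released ... held 1.020s") is produced by oslo.concurrency's lockutils, which Nova uses for the resource tracker's `compute_resources` lock and the per-instance `refresh_cache-<uuid>` locks. A minimal sketch of the two usage forms (the guarded work is a placeholder):

```python
# Minimal sketch of the oslo.concurrency lock usage behind the
# "Acquiring/Acquired/Releasing lock" DEBUG lines above.
from oslo_concurrency import lockutils

# Decorator form, comparable to how the resource tracker serialises its
# "compute_resources" critical sections.
@lockutils.synchronized('compute_resources')
def update_usage():
    pass  # placeholder for the guarded resource-tracker work

# Context-manager form, comparable to the per-instance network cache locks.
instance_uuid = '31419285-9fdf-4d37-94d7-d1b08c6b6b05'  # uuid from the log
with lockutils.lock('refresh_cache-%s' % instance_uuid):
    pass  # placeholder: rebuild the instance's network info cache

update_usage()
```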
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1964.802608] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6888b04c-248b-4046-820b-4d40653ee4b8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.805368] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Reconfigured VM instance instance-0000003a to attach disk [datastore2] a56a3fab-e491-44f5-9cf4-2c308138ffc4/a56a3fab-e491-44f5-9cf4-2c308138ffc4.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1964.806054] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-870bd5c4-df32-4737-b5c1-7eeb1ea75654 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.812558] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1964.813837] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8c11ba6f-bc4a-4bf6-aa6a-d24d149a4a41 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.815570] env[62684]: DEBUG oslo_vmware.api [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 1964.815570] env[62684]: value = "task-2052973" [ 1964.815570] env[62684]: _type = "Task" [ 1964.815570] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.820609] env[62684]: DEBUG oslo_vmware.api [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Waiting for the task: (returnval){ [ 1964.820609] env[62684]: value = "task-2052974" [ 1964.820609] env[62684]: _type = "Task" [ 1964.820609] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.826493] env[62684]: DEBUG oslo_vmware.api [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052973, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.833607] env[62684]: DEBUG oslo_vmware.api [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Task: {'id': task-2052974, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.856544] env[62684]: DEBUG nova.compute.manager [None req-3526d58c-c224-4ef2-a3e6-1692ba61aed7 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Instance disappeared during snapshot {{(pid=62684) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4495}} [ 1964.893136] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Acquiring lock "refresh_cache-dab11b88-ac23-43f0-9203-024faf41e1f5" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1964.893367] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Acquired lock "refresh_cache-dab11b88-ac23-43f0-9203-024faf41e1f5" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1964.893572] env[62684]: DEBUG nova.network.neutron [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1964.909936] env[62684]: DEBUG oslo_vmware.api [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052972, 'name': ReconfigVM_Task, 'duration_secs': 0.164346} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1964.910283] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Reconfigured VM instance instance-00000030 to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1964.911219] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efffaafc-96c9-4543-9d12-9786b47c6176 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.654486] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Reconfiguring VM instance instance-00000030 to attach disk [datastore2] 02dc8c41-5092-4f84-9722-37d4df3a459a/02dc8c41-5092-4f84-9722-37d4df3a459a.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1965.658172] env[62684]: DEBUG nova.network.neutron [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Successfully created port: 7f34208d-7594-4a0b-8fef-8e970496dc46 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1965.668885] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd54e954-8810-43f1-b385-26c118f9c9f9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.688209] env[62684]: DEBUG oslo_vmware.api [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Task: {'id': task-2052974, 'name': PowerOffVM_Task, 'duration_secs': 0.242724} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1965.691201] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1965.691461] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1965.691887] env[62684]: DEBUG oslo_vmware.api [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 1965.691887] env[62684]: value = "task-2052976" [ 1965.691887] env[62684]: _type = "Task" [ 1965.691887] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1965.692152] env[62684]: DEBUG oslo_vmware.api [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052973, 'name': Rename_Task, 'duration_secs': 0.15285} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1965.692963] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4a27dc83-0d0d-48c5-8a10-b98a5fab2f61 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.694980] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1965.701216] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-69c74076-a1c2-44ec-9aae-cfe3725252d6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.709569] env[62684]: DEBUG oslo_vmware.api [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052976, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.711336] env[62684]: DEBUG oslo_vmware.api [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 1965.711336] env[62684]: value = "task-2052978" [ 1965.711336] env[62684]: _type = "Task" [ 1965.711336] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1965.720356] env[62684]: DEBUG oslo_vmware.api [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052978, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.725435] env[62684]: DEBUG nova.compute.manager [req-d9adc3ff-84b5-4380-9d12-13957ea35fbc req-15c15e10-d1a8-4415-b41a-203c0a4050b4 service nova] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Received event network-vif-plugged-6da08bf6-6c5b-41a3-90e2-d17b27a734e4 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1965.725435] env[62684]: DEBUG oslo_concurrency.lockutils [req-d9adc3ff-84b5-4380-9d12-13957ea35fbc req-15c15e10-d1a8-4415-b41a-203c0a4050b4 service nova] Acquiring lock "dab11b88-ac23-43f0-9203-024faf41e1f5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1965.725666] env[62684]: DEBUG oslo_concurrency.lockutils [req-d9adc3ff-84b5-4380-9d12-13957ea35fbc req-15c15e10-d1a8-4415-b41a-203c0a4050b4 service nova] Lock "dab11b88-ac23-43f0-9203-024faf41e1f5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1965.725839] env[62684]: DEBUG oslo_concurrency.lockutils [req-d9adc3ff-84b5-4380-9d12-13957ea35fbc req-15c15e10-d1a8-4415-b41a-203c0a4050b4 service nova] Lock "dab11b88-ac23-43f0-9203-024faf41e1f5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1965.726237] env[62684]: DEBUG nova.compute.manager [req-d9adc3ff-84b5-4380-9d12-13957ea35fbc req-15c15e10-d1a8-4415-b41a-203c0a4050b4 service nova] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] No waiting events found dispatching network-vif-plugged-6da08bf6-6c5b-41a3-90e2-d17b27a734e4 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1965.726237] env[62684]: WARNING nova.compute.manager [req-d9adc3ff-84b5-4380-9d12-13957ea35fbc req-15c15e10-d1a8-4415-b41a-203c0a4050b4 service nova] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Received unexpected event network-vif-plugged-6da08bf6-6c5b-41a3-90e2-d17b27a734e4 for instance with vm_state building and task_state spawning. [ 1965.726379] env[62684]: DEBUG nova.compute.manager [req-d9adc3ff-84b5-4380-9d12-13957ea35fbc req-15c15e10-d1a8-4415-b41a-203c0a4050b4 service nova] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Received event network-changed-6da08bf6-6c5b-41a3-90e2-d17b27a734e4 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1965.726515] env[62684]: DEBUG nova.compute.manager [req-d9adc3ff-84b5-4380-9d12-13957ea35fbc req-15c15e10-d1a8-4415-b41a-203c0a4050b4 service nova] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Refreshing instance network info cache due to event network-changed-6da08bf6-6c5b-41a3-90e2-d17b27a734e4. 
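The network-vif-plugged / network-changed entries above show Neutron pushing port events to Nova; the compute manager pops a matching waiter if a spawn is blocked on that event, and otherwise emits the "Received unexpected event ... for instance with vm_state building" warning. A toy sketch of that wait/dispatch idea (this is not Nova's InstanceEvents class, just the shape of the mechanism):

```python
# Toy illustration of the "pop_instance_event" wait/dispatch pattern logged
# above. This is NOT nova.compute.manager.InstanceEvents; names and structure
# are simplified to show the idea only.
import threading

class EventRegistry:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        """Register interest before starting the operation that triggers it."""
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def dispatch(self, instance_uuid, event_name):
        """Handle an external event arriving from Neutron."""
        with self._lock:
            ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            print('No waiting events found dispatching %s' % event_name)
        else:
            ev.set()

registry = EventRegistry()
# An event with no registered waiter mirrors the WARNING entries above.
registry.dispatch('dab11b88-ac23-43f0-9203-024faf41e1f5',
                  'network-vif-plugged-6da08bf6-6c5b-41a3-90e2-d17b27a734e4')
```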
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1965.726681] env[62684]: DEBUG oslo_concurrency.lockutils [req-d9adc3ff-84b5-4380-9d12-13957ea35fbc req-15c15e10-d1a8-4415-b41a-203c0a4050b4 service nova] Acquiring lock "refresh_cache-dab11b88-ac23-43f0-9203-024faf41e1f5" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1965.728160] env[62684]: DEBUG nova.compute.manager [None req-3526d58c-c224-4ef2-a3e6-1692ba61aed7 tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Found 0 images (rotation: 2) {{(pid=62684) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1965.730551] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1965.730825] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1965.731042] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Deleting the datastore file [datastore1] 18a97088-fffa-4b77-8ab0-d24f6f84f516 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1965.731551] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bbc5bb19-8b5c-4003-8118-c937084d462c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.738654] env[62684]: DEBUG oslo_vmware.api [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Waiting for the task: (returnval){ [ 1965.738654] env[62684]: value = "task-2052979" [ 1965.738654] env[62684]: _type = "Task" [ 1965.738654] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1965.745567] env[62684]: DEBUG nova.network.neutron [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1965.752177] env[62684]: DEBUG oslo_vmware.api [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Task: {'id': task-2052979, 'name': DeleteDatastoreFile_Task} progress is 0%. 
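The teardown above (power off, UnregisterVM, then FileManager.DeleteDatastoreFile_Task on `[datastore1] 18a97088-...`) is the usual vSphere destroy sequence. A condensed sketch of those calls through an oslo.vmware session (host, credentials and the managed object references are placeholders; the datastore path is taken from the log):

```python
# Condensed sketch of the power-off / unregister / delete-files sequence
# logged above. Host, credentials and the morefs are placeholders.
from oslo_vmware import api
from oslo_vmware import vim_util

session = api.VMwareAPISession(
    'vcenter.example.org', 'administrator@vsphere.local', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

vm_ref = vim_util.get_moref('vm-67890', 'VirtualMachine')      # assumed
dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')      # assumed
ds_path = '[datastore1] 18a97088-fffa-4b77-8ab0-d24f6f84f516'  # from the log

# 1) power off, 2) unregister the VM, 3) delete its files from the datastore.
session.wait_for_task(
    session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref))
session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
file_manager = session.vim.service_content.fileManager
session.wait_for_task(
    session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                       file_manager, name=ds_path, datacenter=dc_ref))
```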
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.921480] env[62684]: DEBUG nova.network.neutron [req-8b580f9a-c447-4bd8-85fd-106a2161ad32 req-9f4b56f1-d05f-464d-9c10-866e4e4d9a81 service nova] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Updated VIF entry in instance network info cache for port d15c8cc5-3728-42eb-8a3a-e5b3390bf5dd. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1965.921480] env[62684]: DEBUG nova.network.neutron [req-8b580f9a-c447-4bd8-85fd-106a2161ad32 req-9f4b56f1-d05f-464d-9c10-866e4e4d9a81 service nova] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Updating instance_info_cache with network_info: [{"id": "d15c8cc5-3728-42eb-8a3a-e5b3390bf5dd", "address": "fa:16:3e:6b:13:58", "network": {"id": "88f09773-de83-4126-80be-f9d85222a3aa", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1417499811-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9473d6611ded48cd9e9d10a6f0bf1a3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd15c8cc5-37", "ovs_interfaceid": "d15c8cc5-3728-42eb-8a3a-e5b3390bf5dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1966.021739] env[62684]: DEBUG nova.network.neutron [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Successfully created port: e42d7614-a61b-4dd8-bfda-e086b3dc3317 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1966.089194] env[62684]: DEBUG nova.network.neutron [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Updating instance_info_cache with network_info: [{"id": "6da08bf6-6c5b-41a3-90e2-d17b27a734e4", "address": "fa:16:3e:d2:cc:6c", "network": {"id": "899020b7-a29e-4a35-bf3c-f9aebda1208d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1035902693-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "feab568b5c9e41bfa2ca824d44bcc4e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "06eaa4c9-dbc2-4d38-a844-7bf76e7b5a64", "external-id": "nsx-vlan-transportzone-804", "segmentation_id": 804, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6da08bf6-6c", "ovs_interfaceid": "6da08bf6-6c5b-41a3-90e2-d17b27a734e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1966.158316] env[62684]: DEBUG nova.compute.manager [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1966.189056] env[62684]: DEBUG nova.virt.hardware [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1966.189056] env[62684]: DEBUG nova.virt.hardware [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1966.189056] env[62684]: DEBUG nova.virt.hardware [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1966.189419] env[62684]: DEBUG nova.virt.hardware [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1966.189419] env[62684]: DEBUG nova.virt.hardware [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1966.189516] env[62684]: DEBUG nova.virt.hardware [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1966.189716] env[62684]: DEBUG 
nova.virt.hardware [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1966.190017] env[62684]: DEBUG nova.virt.hardware [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1966.190101] env[62684]: DEBUG nova.virt.hardware [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1966.190282] env[62684]: DEBUG nova.virt.hardware [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1966.190463] env[62684]: DEBUG nova.virt.hardware [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1966.191445] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c029aa05-d172-48ff-8fc8-cfdf5b4e5de2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.203535] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26d8b1cb-27da-4fc3-a0bd-83aa9a0c1e46 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.212958] env[62684]: DEBUG oslo_vmware.api [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052976, 'name': ReconfigVM_Task, 'duration_secs': 0.322809} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1966.217341] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Reconfigured VM instance instance-00000030 to attach disk [datastore2] 02dc8c41-5092-4f84-9722-37d4df3a459a/02dc8c41-5092-4f84-9722-37d4df3a459a.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1966.217555] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Updating instance '02dc8c41-5092-4f84-9722-37d4df3a459a' progress to 50 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1966.221490] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a843412a-e0a0-4652-9a03-b213cafb216e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.239314] env[62684]: DEBUG oslo_vmware.api [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052978, 'name': PowerOnVM_Task, 'duration_secs': 0.48781} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1966.241671] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1966.241671] env[62684]: INFO nova.compute.manager [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Took 8.47 seconds to spawn the instance on the hypervisor. [ 1966.241811] env[62684]: DEBUG nova.compute.manager [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1966.243028] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7a54df7-7c97-43a5-bc2d-6099a586331d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.248526] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-137290b5-02ed-48a4-be6e-9cda86030b38 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.257574] env[62684]: DEBUG oslo_vmware.api [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Task: {'id': task-2052979, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109582} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1966.282860] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1966.283103] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1966.283351] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1966.283527] env[62684]: INFO nova.compute.manager [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Took 1.48 seconds to destroy the instance on the hypervisor. [ 1966.283705] env[62684]: DEBUG oslo.service.loopingcall [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1966.288203] env[62684]: DEBUG nova.compute.manager [-] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1966.288311] env[62684]: DEBUG nova.network.neutron [-] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1966.290502] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3be855c0-4253-417b-94e3-9956b6698ed8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.299154] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d465be07-7ea8-414f-8cb9-d893072bf237 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.314162] env[62684]: DEBUG nova.compute.provider_tree [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory 
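The oslo.service.loopingcall entry above ("Waiting for function ... _deallocate_network_with_retries to return") reflects the retry loop Nova wraps around network deallocation: a looping call re-runs the function until it signals completion by raising LoopingCallDone. A generic sketch of that mechanism (the deallocate() stand-in is illustrative, not Nova's helper):

```python
# Generic sketch of the oslo.service looping-call pattern referenced above.
# The deallocate() stand-in is illustrative only; the real helper is
# _deallocate_network_with_retries in nova/compute/manager.py.
from oslo_service import loopingcall

attempts = {'count': 0}

def deallocate():
    attempts['count'] += 1
    if attempts['count'] < 3:
        return  # simulate a soft failure; the loop will call us again
    # Raising LoopingCallDone stops the loop; retvalue becomes wait()'s result.
    raise loopingcall.LoopingCallDone(retvalue='deallocated')

timer = loopingcall.FixedIntervalLoopingCall(deallocate)
print(timer.start(interval=0.1).wait())  # 'deallocated' after three attempts
```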
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1966.316863] env[62684]: DEBUG nova.network.neutron [-] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1966.423842] env[62684]: DEBUG oslo_concurrency.lockutils [req-8b580f9a-c447-4bd8-85fd-106a2161ad32 req-9f4b56f1-d05f-464d-9c10-866e4e4d9a81 service nova] Releasing lock "refresh_cache-31419285-9fdf-4d37-94d7-d1b08c6b6b05" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1966.592360] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Releasing lock "refresh_cache-dab11b88-ac23-43f0-9203-024faf41e1f5" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1966.593180] env[62684]: DEBUG nova.compute.manager [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Instance network_info: |[{"id": "6da08bf6-6c5b-41a3-90e2-d17b27a734e4", "address": "fa:16:3e:d2:cc:6c", "network": {"id": "899020b7-a29e-4a35-bf3c-f9aebda1208d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1035902693-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "feab568b5c9e41bfa2ca824d44bcc4e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06eaa4c9-dbc2-4d38-a844-7bf76e7b5a64", "external-id": "nsx-vlan-transportzone-804", "segmentation_id": 804, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6da08bf6-6c", "ovs_interfaceid": "6da08bf6-6c5b-41a3-90e2-d17b27a734e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1966.593180] env[62684]: DEBUG oslo_concurrency.lockutils [req-d9adc3ff-84b5-4380-9d12-13957ea35fbc req-15c15e10-d1a8-4415-b41a-203c0a4050b4 service nova] Acquired lock "refresh_cache-dab11b88-ac23-43f0-9203-024faf41e1f5" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1966.593376] env[62684]: DEBUG nova.network.neutron [req-d9adc3ff-84b5-4380-9d12-13957ea35fbc req-15c15e10-d1a8-4415-b41a-203c0a4050b4 service nova] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Refreshing network info cache for port 6da08bf6-6c5b-41a3-90e2-d17b27a734e4 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1966.595045] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:d2:cc:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '06eaa4c9-dbc2-4d38-a844-7bf76e7b5a64', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6da08bf6-6c5b-41a3-90e2-d17b27a734e4', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1966.601902] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Creating folder: Project (feab568b5c9e41bfa2ca824d44bcc4e7). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1966.603103] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1156001e-0f0c-4fcd-8477-437b5cbc9b9e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.614554] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Created folder: Project (feab568b5c9e41bfa2ca824d44bcc4e7) in parent group-v421118. [ 1966.614554] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Creating folder: Instances. Parent ref: group-v421288. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1966.614703] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b46c00a3-bf1e-45f4-b2fd-1037278944bb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.625030] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Created folder: Instances in parent group-v421288. [ 1966.625030] env[62684]: DEBUG oslo.service.loopingcall [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1966.625221] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1966.625356] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-972d0e13-88cb-44fa-ab0b-50210c46f390 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.643227] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1966.643227] env[62684]: value = "task-2052982" [ 1966.643227] env[62684]: _type = "Task" [ 1966.643227] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1966.650934] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052982, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.736520] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87d2cff1-96c3-4a63-b29f-c8933f9d03a1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.758648] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52152387-5059-46aa-8467-23b729b030e4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.777271] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Updating instance '02dc8c41-5092-4f84-9722-37d4df3a459a' progress to 67 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1966.796194] env[62684]: INFO nova.compute.manager [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Took 52.53 seconds to build instance. [ 1966.821811] env[62684]: DEBUG nova.network.neutron [-] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1966.835216] env[62684]: ERROR nova.scheduler.client.report [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [req-812d7dc7-f1ae-4eab-ba40-b004dbbc6586] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-812d7dc7-f1ae-4eab-ba40-b004dbbc6586"}]} [ 1966.851634] env[62684]: DEBUG nova.scheduler.client.report [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1966.867501] env[62684]: DEBUG nova.scheduler.client.report [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1966.867845] env[62684]: DEBUG nova.compute.provider_tree [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1966.879595] env[62684]: DEBUG nova.scheduler.client.report [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1966.898816] env[62684]: DEBUG nova.scheduler.client.report [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1967.155613] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052982, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.279823] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc702fa-1911-40b6-ac90-a1684fd840b9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.290275] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04a0ac58-149a-4288-82fe-fb2707509384 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.320559] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d3972e87-6edb-4f93-8ec7-8c93c3dd9923 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "a56a3fab-e491-44f5-9cf4-2c308138ffc4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.904s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1967.321673] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d3a680-7e2e-4ad8-bc31-8291330c76e4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.324461] env[62684]: INFO nova.compute.manager [-] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Took 1.04 seconds to deallocate network for instance. [ 1967.332930] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e638b262-d078-4a61-95a4-776d65a73f24 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.349247] env[62684]: DEBUG nova.compute.provider_tree [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1967.394765] env[62684]: DEBUG nova.network.neutron [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Port 39c750a6-1076-4354-bc30-d7f50ca821b5 binding to destination host cpu-1 is already ACTIVE {{(pid=62684) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1967.463580] env[62684]: DEBUG nova.network.neutron [req-d9adc3ff-84b5-4380-9d12-13957ea35fbc req-15c15e10-d1a8-4415-b41a-203c0a4050b4 service nova] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Updated VIF entry in instance network info cache for port 6da08bf6-6c5b-41a3-90e2-d17b27a734e4. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1967.463967] env[62684]: DEBUG nova.network.neutron [req-d9adc3ff-84b5-4380-9d12-13957ea35fbc req-15c15e10-d1a8-4415-b41a-203c0a4050b4 service nova] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Updating instance_info_cache with network_info: [{"id": "6da08bf6-6c5b-41a3-90e2-d17b27a734e4", "address": "fa:16:3e:d2:cc:6c", "network": {"id": "899020b7-a29e-4a35-bf3c-f9aebda1208d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1035902693-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "feab568b5c9e41bfa2ca824d44bcc4e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06eaa4c9-dbc2-4d38-a844-7bf76e7b5a64", "external-id": "nsx-vlan-transportzone-804", "segmentation_id": 804, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6da08bf6-6c", "ovs_interfaceid": "6da08bf6-6c5b-41a3-90e2-d17b27a734e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1967.569357] env[62684]: DEBUG nova.network.neutron [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Successfully updated port: 4ee262da-da6d-457d-a523-6d14746195e0 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1967.589206] env[62684]: DEBUG nova.compute.manager [req-1c493c30-b8fc-45b7-b527-feb4893bd1aa req-25483bd6-24c3-4c9b-8e38-305a0f7265bc service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Received event network-vif-plugged-4ee262da-da6d-457d-a523-6d14746195e0 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1967.589439] env[62684]: DEBUG oslo_concurrency.lockutils [req-1c493c30-b8fc-45b7-b527-feb4893bd1aa req-25483bd6-24c3-4c9b-8e38-305a0f7265bc service nova] Acquiring lock "2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1967.589658] env[62684]: DEBUG oslo_concurrency.lockutils [req-1c493c30-b8fc-45b7-b527-feb4893bd1aa req-25483bd6-24c3-4c9b-8e38-305a0f7265bc service nova] Lock "2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1967.589854] env[62684]: DEBUG oslo_concurrency.lockutils [req-1c493c30-b8fc-45b7-b527-feb4893bd1aa req-25483bd6-24c3-4c9b-8e38-305a0f7265bc service nova] Lock "2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1967.590048] env[62684]: DEBUG nova.compute.manager [req-1c493c30-b8fc-45b7-b527-feb4893bd1aa req-25483bd6-24c3-4c9b-8e38-305a0f7265bc service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] No waiting events found dispatching network-vif-plugged-4ee262da-da6d-457d-a523-6d14746195e0 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1967.590980] env[62684]: WARNING nova.compute.manager [req-1c493c30-b8fc-45b7-b527-feb4893bd1aa req-25483bd6-24c3-4c9b-8e38-305a0f7265bc service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Received unexpected event network-vif-plugged-4ee262da-da6d-457d-a523-6d14746195e0 for instance with vm_state building and task_state spawning. [ 1967.655939] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052982, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.832846] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1967.879938] env[62684]: DEBUG nova.scheduler.client.report [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 90 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1967.880260] env[62684]: DEBUG nova.compute.provider_tree [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 90 to 91 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1967.880457] env[62684]: DEBUG nova.compute.provider_tree [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1967.967049] env[62684]: DEBUG oslo_concurrency.lockutils [req-d9adc3ff-84b5-4380-9d12-13957ea35fbc req-15c15e10-d1a8-4415-b41a-203c0a4050b4 service nova] 
Releasing lock "refresh_cache-dab11b88-ac23-43f0-9203-024faf41e1f5" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1968.049791] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "a56a3fab-e491-44f5-9cf4-2c308138ffc4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1968.050369] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "a56a3fab-e491-44f5-9cf4-2c308138ffc4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1968.050369] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "a56a3fab-e491-44f5-9cf4-2c308138ffc4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1968.050520] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "a56a3fab-e491-44f5-9cf4-2c308138ffc4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1968.050700] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "a56a3fab-e491-44f5-9cf4-2c308138ffc4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1968.052895] env[62684]: INFO nova.compute.manager [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Terminating instance [ 1968.054631] env[62684]: DEBUG nova.compute.manager [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1968.054834] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1968.055683] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-419d5684-5c99-44ce-a1b5-7f570ea816bb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.064608] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1968.064834] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1a66fff5-db27-430f-a612-6e751914c0c5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.070662] env[62684]: DEBUG oslo_vmware.api [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 1968.070662] env[62684]: value = "task-2052983" [ 1968.070662] env[62684]: _type = "Task" [ 1968.070662] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1968.079710] env[62684]: DEBUG oslo_vmware.api [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052983, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.155845] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052982, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.386058] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.778s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1968.387662] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.021s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1968.387985] env[62684]: DEBUG nova.objects.instance [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Lazy-loading 'resources' on Instance uuid 8449f09b-4e7b-4511-bb3c-2ff6667addb2 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1968.416271] env[62684]: DEBUG oslo_concurrency.lockutils [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "02dc8c41-5092-4f84-9722-37d4df3a459a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1968.416524] env[62684]: DEBUG oslo_concurrency.lockutils [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "02dc8c41-5092-4f84-9722-37d4df3a459a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1968.416708] env[62684]: DEBUG oslo_concurrency.lockutils [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "02dc8c41-5092-4f84-9722-37d4df3a459a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1968.418267] env[62684]: INFO nova.scheduler.client.report [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Deleted allocations for instance 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978 [ 1968.581470] env[62684]: DEBUG oslo_vmware.api [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052983, 'name': PowerOffVM_Task, 'duration_secs': 0.29401} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1968.581706] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1968.581885] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1968.582211] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e5f9a6a9-5ec4-4882-8569-dedc7b32002a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.656729] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052982, 'name': CreateVM_Task, 'duration_secs': 1.567838} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1968.656845] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1968.657578] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1968.657760] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1968.658606] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1968.658606] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ab14314-31d5-4db2-82c0-02016e384da6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.662789] env[62684]: DEBUG oslo_vmware.api [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Waiting for the task: (returnval){ [ 1968.662789] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5246d8dc-4940-26bb-3f3e-9526e07bfe70" [ 1968.662789] env[62684]: _type = "Task" [ 1968.662789] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1968.670274] env[62684]: DEBUG oslo_vmware.api [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5246d8dc-4940-26bb-3f3e-9526e07bfe70, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.822230] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1968.822964] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1968.822964] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Deleting the datastore file [datastore2] a56a3fab-e491-44f5-9cf4-2c308138ffc4 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1968.822964] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7cfb66f1-02e0-4556-9c5c-ca41400a7903 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.828813] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1968.830865] env[62684]: DEBUG oslo_vmware.api [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 1968.830865] env[62684]: value = "task-2052985" [ 1968.830865] env[62684]: _type = "Task" [ 1968.830865] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1968.838685] env[62684]: DEBUG oslo_vmware.api [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052985, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.926585] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a840957-1c27-4c7f-b45a-c4c06418c5ea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "4e5152b0-7bac-4dc2-b6c7-6590fa2d5978" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.336s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1969.175912] env[62684]: DEBUG oslo_vmware.api [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5246d8dc-4940-26bb-3f3e-9526e07bfe70, 'name': SearchDatastore_Task, 'duration_secs': 0.011024} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1969.176067] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1969.176364] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1969.176626] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1969.176779] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1969.177177] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1969.177374] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-caa2b790-42fa-4251-ad2e-bf0016aa11b1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.186161] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] 
Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1969.186396] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1969.187156] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96f92ae7-9538-4475-8dac-f1ed27db0d19 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.196176] env[62684]: DEBUG oslo_vmware.api [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Waiting for the task: (returnval){ [ 1969.196176] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520bc362-9bb8-27e3-935c-55c2d8e8c67b" [ 1969.196176] env[62684]: _type = "Task" [ 1969.196176] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1969.203843] env[62684]: DEBUG oslo_vmware.api [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520bc362-9bb8-27e3-935c-55c2d8e8c67b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1969.312298] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af22afd8-3285-4c01-9815-34233b02978f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.321581] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a368a3e-2b6e-4a21-a35e-5fe7a421d24d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.356099] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99795099-da96-4b77-9d08-34ef14c7db28 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.365917] env[62684]: DEBUG oslo_vmware.api [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2052985, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181504} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1969.368274] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1969.368515] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1969.368885] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1969.368944] env[62684]: INFO nova.compute.manager [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Took 1.31 seconds to destroy the instance on the hypervisor. [ 1969.369249] env[62684]: DEBUG oslo.service.loopingcall [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1969.369595] env[62684]: DEBUG nova.compute.manager [-] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1969.369638] env[62684]: DEBUG nova.network.neutron [-] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1969.372219] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9c9611d-9f09-4499-a81c-c231bddfc024 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.387535] env[62684]: DEBUG nova.compute.provider_tree [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1969.502594] env[62684]: DEBUG oslo_concurrency.lockutils [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "refresh_cache-02dc8c41-5092-4f84-9722-37d4df3a459a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1969.502889] env[62684]: DEBUG oslo_concurrency.lockutils [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquired lock "refresh_cache-02dc8c41-5092-4f84-9722-37d4df3a459a" 
{{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1969.502980] env[62684]: DEBUG nova.network.neutron [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1969.582824] env[62684]: DEBUG nova.network.neutron [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Successfully updated port: 7f34208d-7594-4a0b-8fef-8e970496dc46 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1969.706418] env[62684]: DEBUG oslo_vmware.api [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520bc362-9bb8-27e3-935c-55c2d8e8c67b, 'name': SearchDatastore_Task, 'duration_secs': 0.009236} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1969.707266] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b04e45b-23ff-431d-91fd-2a513a294b36 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.713143] env[62684]: DEBUG oslo_vmware.api [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Waiting for the task: (returnval){ [ 1969.713143] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5297eae9-70ef-777d-57ce-599621413374" [ 1969.713143] env[62684]: _type = "Task" [ 1969.713143] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1969.722791] env[62684]: DEBUG oslo_vmware.api [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5297eae9-70ef-777d-57ce-599621413374, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1969.891165] env[62684]: DEBUG nova.scheduler.client.report [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1969.907746] env[62684]: DEBUG nova.compute.manager [req-8c739d5e-278c-4d77-9f88-471d0ecf984f req-0762132e-c1cd-421f-b7de-64b9be6272b8 service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Received event network-changed-4ee262da-da6d-457d-a523-6d14746195e0 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1969.907949] env[62684]: DEBUG nova.compute.manager [req-8c739d5e-278c-4d77-9f88-471d0ecf984f req-0762132e-c1cd-421f-b7de-64b9be6272b8 service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Refreshing instance network info cache due to event network-changed-4ee262da-da6d-457d-a523-6d14746195e0. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1969.908184] env[62684]: DEBUG oslo_concurrency.lockutils [req-8c739d5e-278c-4d77-9f88-471d0ecf984f req-0762132e-c1cd-421f-b7de-64b9be6272b8 service nova] Acquiring lock "refresh_cache-2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1969.908329] env[62684]: DEBUG oslo_concurrency.lockutils [req-8c739d5e-278c-4d77-9f88-471d0ecf984f req-0762132e-c1cd-421f-b7de-64b9be6272b8 service nova] Acquired lock "refresh_cache-2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1969.908495] env[62684]: DEBUG nova.network.neutron [req-8c739d5e-278c-4d77-9f88-471d0ecf984f req-0762132e-c1cd-421f-b7de-64b9be6272b8 service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Refreshing network info cache for port 4ee262da-da6d-457d-a523-6d14746195e0 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1970.213367] env[62684]: DEBUG nova.network.neutron [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Updating instance_info_cache with network_info: [{"id": "39c750a6-1076-4354-bc30-d7f50ca821b5", "address": "fa:16:3e:16:fe:89", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.34", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, 
"type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39c750a6-10", "ovs_interfaceid": "39c750a6-1076-4354-bc30-d7f50ca821b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1970.225464] env[62684]: DEBUG oslo_vmware.api [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5297eae9-70ef-777d-57ce-599621413374, 'name': SearchDatastore_Task, 'duration_secs': 0.01068} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1970.225842] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1970.226019] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] dab11b88-ac23-43f0-9203-024faf41e1f5/dab11b88-ac23-43f0-9203-024faf41e1f5.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1970.226312] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-768e4b52-1f2a-4446-bec0-fbf224830700 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.233256] env[62684]: DEBUG oslo_vmware.api [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Waiting for the task: (returnval){ [ 1970.233256] env[62684]: value = "task-2052986" [ 1970.233256] env[62684]: _type = "Task" [ 1970.233256] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1970.247500] env[62684]: DEBUG oslo_vmware.api [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2052986, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.396248] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.008s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1970.400531] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.295s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1970.404226] env[62684]: INFO nova.compute.claims [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1970.430619] env[62684]: INFO nova.scheduler.client.report [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Deleted allocations for instance 8449f09b-4e7b-4511-bb3c-2ff6667addb2 [ 1970.456585] env[62684]: DEBUG nova.network.neutron [req-8c739d5e-278c-4d77-9f88-471d0ecf984f req-0762132e-c1cd-421f-b7de-64b9be6272b8 service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1970.496752] env[62684]: DEBUG nova.compute.manager [req-658cb3e3-d3a7-4b00-b637-18be813c174c req-ee8740c7-2700-4b4b-abe5-63e3e14553dc service nova] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Received event network-vif-deleted-bf2ef9b1-19b4-40eb-a403-401532281c03 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1970.496969] env[62684]: INFO nova.compute.manager [req-658cb3e3-d3a7-4b00-b637-18be813c174c req-ee8740c7-2700-4b4b-abe5-63e3e14553dc service nova] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Neutron deleted interface bf2ef9b1-19b4-40eb-a403-401532281c03; detaching it from the instance and deleting it from the info cache [ 1970.497224] env[62684]: DEBUG nova.network.neutron [req-658cb3e3-d3a7-4b00-b637-18be813c174c req-ee8740c7-2700-4b4b-abe5-63e3e14553dc service nova] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1970.564626] env[62684]: DEBUG nova.network.neutron [req-8c739d5e-278c-4d77-9f88-471d0ecf984f req-0762132e-c1cd-421f-b7de-64b9be6272b8 service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1970.640626] env[62684]: DEBUG nova.network.neutron [-] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1970.720404] env[62684]: DEBUG oslo_concurrency.lockutils 
[None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Releasing lock "refresh_cache-02dc8c41-5092-4f84-9722-37d4df3a459a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1970.747278] env[62684]: DEBUG oslo_vmware.api [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2052986, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502551} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1970.747278] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] dab11b88-ac23-43f0-9203-024faf41e1f5/dab11b88-ac23-43f0-9203-024faf41e1f5.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1970.747278] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1970.747278] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5adf6992-65d2-4fb1-b126-e6378a5403f5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.750941] env[62684]: DEBUG oslo_vmware.api [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Waiting for the task: (returnval){ [ 1970.750941] env[62684]: value = "task-2052987" [ 1970.750941] env[62684]: _type = "Task" [ 1970.750941] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1970.761056] env[62684]: DEBUG oslo_vmware.api [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2052987, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.940987] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9b058a39-119a-40b8-9a4b-3032d26bbc5c tempest-ServerShowV257Test-1797705574 tempest-ServerShowV257Test-1797705574-project-member] Lock "8449f09b-4e7b-4511-bb3c-2ff6667addb2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.647s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1971.000691] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-622d6a7f-a156-45e5-835d-e3e6b54364fc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.011198] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbf1abc5-553b-471e-aed5-77fa9ef9927d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.044982] env[62684]: DEBUG nova.compute.manager [req-658cb3e3-d3a7-4b00-b637-18be813c174c req-ee8740c7-2700-4b4b-abe5-63e3e14553dc service nova] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Detach interface failed, port_id=bf2ef9b1-19b4-40eb-a403-401532281c03, reason: Instance a56a3fab-e491-44f5-9cf4-2c308138ffc4 could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1971.067805] env[62684]: DEBUG oslo_concurrency.lockutils [req-8c739d5e-278c-4d77-9f88-471d0ecf984f req-0762132e-c1cd-421f-b7de-64b9be6272b8 service nova] Releasing lock "refresh_cache-2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1971.068126] env[62684]: DEBUG nova.compute.manager [req-8c739d5e-278c-4d77-9f88-471d0ecf984f req-0762132e-c1cd-421f-b7de-64b9be6272b8 service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Received event network-vif-plugged-7f34208d-7594-4a0b-8fef-8e970496dc46 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1971.068413] env[62684]: DEBUG oslo_concurrency.lockutils [req-8c739d5e-278c-4d77-9f88-471d0ecf984f req-0762132e-c1cd-421f-b7de-64b9be6272b8 service nova] Acquiring lock "2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1971.068546] env[62684]: DEBUG oslo_concurrency.lockutils [req-8c739d5e-278c-4d77-9f88-471d0ecf984f req-0762132e-c1cd-421f-b7de-64b9be6272b8 service nova] Lock "2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1971.068715] env[62684]: DEBUG oslo_concurrency.lockutils [req-8c739d5e-278c-4d77-9f88-471d0ecf984f req-0762132e-c1cd-421f-b7de-64b9be6272b8 service nova] Lock "2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1971.068884] env[62684]: DEBUG nova.compute.manager [req-8c739d5e-278c-4d77-9f88-471d0ecf984f 
req-0762132e-c1cd-421f-b7de-64b9be6272b8 service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] No waiting events found dispatching network-vif-plugged-7f34208d-7594-4a0b-8fef-8e970496dc46 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1971.069066] env[62684]: WARNING nova.compute.manager [req-8c739d5e-278c-4d77-9f88-471d0ecf984f req-0762132e-c1cd-421f-b7de-64b9be6272b8 service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Received unexpected event network-vif-plugged-7f34208d-7594-4a0b-8fef-8e970496dc46 for instance with vm_state building and task_state spawning. [ 1971.069237] env[62684]: DEBUG nova.compute.manager [req-8c739d5e-278c-4d77-9f88-471d0ecf984f req-0762132e-c1cd-421f-b7de-64b9be6272b8 service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Received event network-changed-7f34208d-7594-4a0b-8fef-8e970496dc46 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1971.069393] env[62684]: DEBUG nova.compute.manager [req-8c739d5e-278c-4d77-9f88-471d0ecf984f req-0762132e-c1cd-421f-b7de-64b9be6272b8 service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Refreshing instance network info cache due to event network-changed-7f34208d-7594-4a0b-8fef-8e970496dc46. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1971.069586] env[62684]: DEBUG oslo_concurrency.lockutils [req-8c739d5e-278c-4d77-9f88-471d0ecf984f req-0762132e-c1cd-421f-b7de-64b9be6272b8 service nova] Acquiring lock "refresh_cache-2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1971.069726] env[62684]: DEBUG oslo_concurrency.lockutils [req-8c739d5e-278c-4d77-9f88-471d0ecf984f req-0762132e-c1cd-421f-b7de-64b9be6272b8 service nova] Acquired lock "refresh_cache-2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1971.069970] env[62684]: DEBUG nova.network.neutron [req-8c739d5e-278c-4d77-9f88-471d0ecf984f req-0762132e-c1cd-421f-b7de-64b9be6272b8 service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Refreshing network info cache for port 7f34208d-7594-4a0b-8fef-8e970496dc46 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1971.148647] env[62684]: INFO nova.compute.manager [-] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Took 1.78 seconds to deallocate network for instance. [ 1971.245987] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50642461-2e44-4182-ba0e-feda231aae0f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.271432] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d12d147-7234-4e00-a3d7-fbe87474d516 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.277536] env[62684]: DEBUG oslo_vmware.api [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2052987, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.192208} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1971.278335] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1971.279147] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eed8403e-eec9-4066-a185-5d5d3989c336 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.284244] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Updating instance '02dc8c41-5092-4f84-9722-37d4df3a459a' progress to 83 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1971.310902] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] dab11b88-ac23-43f0-9203-024faf41e1f5/dab11b88-ac23-43f0-9203-024faf41e1f5.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1971.311418] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4aee92b7-6f4a-4e44-afc7-792c2f77c2ce {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.332162] env[62684]: DEBUG oslo_vmware.api [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Waiting for the task: (returnval){ [ 1971.332162] env[62684]: value = "task-2052988" [ 1971.332162] env[62684]: _type = "Task" [ 1971.332162] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1971.341496] env[62684]: DEBUG oslo_vmware.api [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2052988, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.623319] env[62684]: DEBUG nova.network.neutron [req-8c739d5e-278c-4d77-9f88-471d0ecf984f req-0762132e-c1cd-421f-b7de-64b9be6272b8 service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1971.656180] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1971.720716] env[62684]: DEBUG nova.network.neutron [req-8c739d5e-278c-4d77-9f88-471d0ecf984f req-0762132e-c1cd-421f-b7de-64b9be6272b8 service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1971.790729] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1971.791952] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8d38e902-0fed-48b0-a413-acebc603bf42 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.798837] env[62684]: DEBUG oslo_vmware.api [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 1971.798837] env[62684]: value = "task-2052989" [ 1971.798837] env[62684]: _type = "Task" [ 1971.798837] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1971.807537] env[62684]: DEBUG oslo_vmware.api [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052989, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.842324] env[62684]: DEBUG oslo_vmware.api [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2052988, 'name': ReconfigVM_Task, 'duration_secs': 0.497694} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1971.842847] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Reconfigured VM instance instance-0000003b to attach disk [datastore2] dab11b88-ac23-43f0-9203-024faf41e1f5/dab11b88-ac23-43f0-9203-024faf41e1f5.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1971.843689] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3381c06b-e631-430f-b349-500f0d4838e8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.857752] env[62684]: DEBUG oslo_vmware.api [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Waiting for the task: (returnval){ [ 1971.857752] env[62684]: value = "task-2052990" [ 1971.857752] env[62684]: _type = "Task" [ 1971.857752] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1971.864305] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79622d13-41f5-4b93-a890-6e4e07eaf1bc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.873275] env[62684]: DEBUG oslo_vmware.api [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2052990, 'name': Rename_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.876242] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ec3cc62-21ef-4fed-9aa5-c8dd95742836 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.908573] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-195282a0-8ce0-413d-92fa-e0874c105c9d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.914018] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "81b7949d-be24-46c9-8dc8-c249b65bb039" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1971.914269] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "81b7949d-be24-46c9-8dc8-c249b65bb039" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1971.920763] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2170652e-e482-40c8-bc49-579fcd9c9891 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.935752] env[62684]: DEBUG nova.compute.provider_tree [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1972.199822] env[62684]: DEBUG nova.network.neutron [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Successfully updated port: e42d7614-a61b-4dd8-bfda-e086b3dc3317 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1972.223028] env[62684]: DEBUG oslo_concurrency.lockutils [req-8c739d5e-278c-4d77-9f88-471d0ecf984f req-0762132e-c1cd-421f-b7de-64b9be6272b8 service nova] Releasing lock "refresh_cache-2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1972.308764] env[62684]: DEBUG oslo_vmware.api [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052989, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1972.367630] env[62684]: DEBUG oslo_vmware.api [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2052990, 'name': Rename_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1972.417491] env[62684]: DEBUG nova.compute.manager [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1972.439471] env[62684]: DEBUG nova.scheduler.client.report [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1972.579329] env[62684]: DEBUG nova.compute.manager [req-9157e431-ea72-454b-bfd6-fc0707bc58c6 req-9456d79a-e992-4e1b-a6f4-1bd6b212e594 service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Received event network-vif-plugged-e42d7614-a61b-4dd8-bfda-e086b3dc3317 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1972.579562] env[62684]: DEBUG oslo_concurrency.lockutils [req-9157e431-ea72-454b-bfd6-fc0707bc58c6 req-9456d79a-e992-4e1b-a6f4-1bd6b212e594 service nova] Acquiring lock "2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1972.579775] env[62684]: DEBUG oslo_concurrency.lockutils [req-9157e431-ea72-454b-bfd6-fc0707bc58c6 req-9456d79a-e992-4e1b-a6f4-1bd6b212e594 service nova] Lock "2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1972.579948] env[62684]: DEBUG oslo_concurrency.lockutils [req-9157e431-ea72-454b-bfd6-fc0707bc58c6 req-9456d79a-e992-4e1b-a6f4-1bd6b212e594 service nova] Lock "2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1972.580161] env[62684]: DEBUG nova.compute.manager [req-9157e431-ea72-454b-bfd6-fc0707bc58c6 req-9456d79a-e992-4e1b-a6f4-1bd6b212e594 service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] No waiting events found dispatching network-vif-plugged-e42d7614-a61b-4dd8-bfda-e086b3dc3317 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1972.580338] env[62684]: 
WARNING nova.compute.manager [req-9157e431-ea72-454b-bfd6-fc0707bc58c6 req-9456d79a-e992-4e1b-a6f4-1bd6b212e594 service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Received unexpected event network-vif-plugged-e42d7614-a61b-4dd8-bfda-e086b3dc3317 for instance with vm_state building and task_state spawning. [ 1972.580508] env[62684]: DEBUG nova.compute.manager [req-9157e431-ea72-454b-bfd6-fc0707bc58c6 req-9456d79a-e992-4e1b-a6f4-1bd6b212e594 service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Received event network-changed-e42d7614-a61b-4dd8-bfda-e086b3dc3317 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1972.580675] env[62684]: DEBUG nova.compute.manager [req-9157e431-ea72-454b-bfd6-fc0707bc58c6 req-9456d79a-e992-4e1b-a6f4-1bd6b212e594 service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Refreshing instance network info cache due to event network-changed-e42d7614-a61b-4dd8-bfda-e086b3dc3317. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1972.580924] env[62684]: DEBUG oslo_concurrency.lockutils [req-9157e431-ea72-454b-bfd6-fc0707bc58c6 req-9456d79a-e992-4e1b-a6f4-1bd6b212e594 service nova] Acquiring lock "refresh_cache-2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1972.581048] env[62684]: DEBUG oslo_concurrency.lockutils [req-9157e431-ea72-454b-bfd6-fc0707bc58c6 req-9456d79a-e992-4e1b-a6f4-1bd6b212e594 service nova] Acquired lock "refresh_cache-2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1972.581163] env[62684]: DEBUG nova.network.neutron [req-9157e431-ea72-454b-bfd6-fc0707bc58c6 req-9456d79a-e992-4e1b-a6f4-1bd6b212e594 service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Refreshing network info cache for port e42d7614-a61b-4dd8-bfda-e086b3dc3317 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1972.702028] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Acquiring lock "refresh_cache-2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1972.809795] env[62684]: DEBUG oslo_vmware.api [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052989, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1972.868274] env[62684]: DEBUG oslo_vmware.api [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2052990, 'name': Rename_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1972.943663] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1972.944514] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.545s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1972.945015] env[62684]: DEBUG nova.compute.manager [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1972.947840] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.901s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1972.949305] env[62684]: INFO nova.compute.claims [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1973.137704] env[62684]: DEBUG nova.network.neutron [req-9157e431-ea72-454b-bfd6-fc0707bc58c6 req-9456d79a-e992-4e1b-a6f4-1bd6b212e594 service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1973.226094] env[62684]: DEBUG nova.network.neutron [req-9157e431-ea72-454b-bfd6-fc0707bc58c6 req-9456d79a-e992-4e1b-a6f4-1bd6b212e594 service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1973.311553] env[62684]: DEBUG oslo_vmware.api [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2052989, 'name': PowerOnVM_Task, 'duration_secs': 1.340156} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1973.311830] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1973.312055] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-94eacf3f-292e-459c-9bb1-ece0b41d3328 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Updating instance '02dc8c41-5092-4f84-9722-37d4df3a459a' progress to 100 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1973.369065] env[62684]: DEBUG oslo_vmware.api [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2052990, 'name': Rename_Task, 'duration_secs': 1.093322} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1973.369376] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1973.369639] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-431a3f00-d775-4558-a732-542a21eda131 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.376581] env[62684]: DEBUG oslo_vmware.api [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Waiting for the task: (returnval){ [ 1973.376581] env[62684]: value = "task-2052991" [ 1973.376581] env[62684]: _type = "Task" [ 1973.376581] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1973.385969] env[62684]: DEBUG oslo_vmware.api [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2052991, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1973.453681] env[62684]: DEBUG nova.compute.utils [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1973.457792] env[62684]: DEBUG nova.compute.manager [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1973.457792] env[62684]: DEBUG nova.network.neutron [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1973.509877] env[62684]: DEBUG nova.policy [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e3a532747bda4c7e8aa2892b424a47ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '263c101fcc5e493789b79dfd1ba97cc0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1973.729659] env[62684]: DEBUG oslo_concurrency.lockutils [req-9157e431-ea72-454b-bfd6-fc0707bc58c6 req-9456d79a-e992-4e1b-a6f4-1bd6b212e594 service nova] Releasing lock "refresh_cache-2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1973.729659] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Acquired lock "refresh_cache-2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1973.729659] env[62684]: DEBUG nova.network.neutron [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1973.890411] env[62684]: DEBUG oslo_vmware.api [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2052991, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1973.902108] env[62684]: DEBUG nova.network.neutron [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Successfully created port: 0a96e2ce-2335-44e2-940d-26d3afbafa3a {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1973.959407] env[62684]: DEBUG nova.compute.manager [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1974.272701] env[62684]: DEBUG nova.network.neutron [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1974.390286] env[62684]: DEBUG oslo_vmware.api [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2052991, 'name': PowerOnVM_Task, 'duration_secs': 0.516858} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1974.390556] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1974.390751] env[62684]: INFO nova.compute.manager [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Took 11.39 seconds to spawn the instance on the hypervisor. [ 1974.390938] env[62684]: DEBUG nova.compute.manager [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1974.393766] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd608d4-4e3c-4a7c-8f7d-f1206bfc30fd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.429634] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0434ac7-8500-49e8-9f48-d4803b910673 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.439217] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb9c2d88-1851-48a9-b919-8585437e0a55 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.480132] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e7ba9a-9a0f-4c29-bcab-9d63bd5964f6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.488739] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f159dd99-4032-43f9-94d8-3f978b781f69 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.507646] env[62684]: DEBUG nova.compute.provider_tree [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1974.740327] env[62684]: DEBUG nova.network.neutron [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Updating instance_info_cache with network_info: [{"id": "4ee262da-da6d-457d-a523-6d14746195e0", "address": "fa:16:3e:aa:35:57", "network": {"id": "08b060f5-b54c-44fb-b154-44f39707094d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1693424751", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.173", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "398aed99e10d457e9cadda3239b27831", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ee262da-da", "ovs_interfaceid": "4ee262da-da6d-457d-a523-6d14746195e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7f34208d-7594-4a0b-8fef-8e970496dc46", "address": "fa:16:3e:47:fc:3a", "network": {"id": "36053f3d-f5ae-45b1-8837-9cf3452e81bd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-568683507", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.232", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "398aed99e10d457e9cadda3239b27831", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": "nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f34208d-75", "ovs_interfaceid": "7f34208d-7594-4a0b-8fef-8e970496dc46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e42d7614-a61b-4dd8-bfda-e086b3dc3317", "address": "fa:16:3e:9c:bd:c3", "network": {"id": "08b060f5-b54c-44fb-b154-44f39707094d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1693424751", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.164", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "398aed99e10d457e9cadda3239b27831", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", 
"segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape42d7614-a6", "ovs_interfaceid": "e42d7614-a61b-4dd8-bfda-e086b3dc3317", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1974.916833] env[62684]: INFO nova.compute.manager [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Took 56.17 seconds to build instance. [ 1974.985251] env[62684]: DEBUG nova.compute.manager [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1975.011829] env[62684]: DEBUG nova.virt.hardware [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1975.012117] env[62684]: DEBUG nova.virt.hardware [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1975.012289] env[62684]: DEBUG nova.virt.hardware [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1975.012483] env[62684]: DEBUG nova.virt.hardware [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1975.012637] env[62684]: DEBUG nova.virt.hardware [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1975.012779] env[62684]: DEBUG nova.virt.hardware [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Chose 
sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1975.012993] env[62684]: DEBUG nova.virt.hardware [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1975.013183] env[62684]: DEBUG nova.virt.hardware [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1975.013869] env[62684]: DEBUG nova.virt.hardware [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1975.013869] env[62684]: DEBUG nova.virt.hardware [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1975.013869] env[62684]: DEBUG nova.virt.hardware [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1975.014619] env[62684]: DEBUG nova.scheduler.client.report [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1975.018247] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50decd88-5d34-4db5-8fe2-dffdde48ff58 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.026894] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-371485a6-26d5-490c-804e-1a7552129b20 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.243633] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Releasing lock "refresh_cache-2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21" {{(pid=62684) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1975.247028] env[62684]: DEBUG nova.compute.manager [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Instance network_info: |[{"id": "4ee262da-da6d-457d-a523-6d14746195e0", "address": "fa:16:3e:aa:35:57", "network": {"id": "08b060f5-b54c-44fb-b154-44f39707094d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1693424751", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.173", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "398aed99e10d457e9cadda3239b27831", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ee262da-da", "ovs_interfaceid": "4ee262da-da6d-457d-a523-6d14746195e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7f34208d-7594-4a0b-8fef-8e970496dc46", "address": "fa:16:3e:47:fc:3a", "network": {"id": "36053f3d-f5ae-45b1-8837-9cf3452e81bd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-568683507", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.232", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "398aed99e10d457e9cadda3239b27831", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": "nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f34208d-75", "ovs_interfaceid": "7f34208d-7594-4a0b-8fef-8e970496dc46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e42d7614-a61b-4dd8-bfda-e086b3dc3317", "address": "fa:16:3e:9c:bd:c3", "network": {"id": "08b060f5-b54c-44fb-b154-44f39707094d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1693424751", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.164", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "398aed99e10d457e9cadda3239b27831", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": 
"nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape42d7614-a6", "ovs_interfaceid": "e42d7614-a61b-4dd8-bfda-e086b3dc3317", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1975.247028] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:35:57', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24ec44b7-0acf-4ff9-8bb3-4641b74af7a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4ee262da-da6d-457d-a523-6d14746195e0', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:fc:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '678ebbe4-4c53-4eaf-a689-93981310f37d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7f34208d-7594-4a0b-8fef-8e970496dc46', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:bd:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24ec44b7-0acf-4ff9-8bb3-4641b74af7a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e42d7614-a61b-4dd8-bfda-e086b3dc3317', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1975.256258] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Creating folder: Project (398aed99e10d457e9cadda3239b27831). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1975.256632] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b9bc3bfd-480c-4a16-88ab-a0d8deb5f8f2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.268139] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Created folder: Project (398aed99e10d457e9cadda3239b27831) in parent group-v421118. [ 1975.268366] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Creating folder: Instances. Parent ref: group-v421291. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1975.269026] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-97ebff40-43e0-408f-a01e-3eb1fd3775ca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.280119] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Created folder: Instances in parent group-v421291. 
[ 1975.280462] env[62684]: DEBUG oslo.service.loopingcall [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1975.280727] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1975.280950] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bbdaaf53-4e01-4f86-bb50-1a8020db5803 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.308009] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1975.308009] env[62684]: value = "task-2052994" [ 1975.308009] env[62684]: _type = "Task" [ 1975.308009] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.315869] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052994, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.419100] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ef189445-046c-4319-9caf-f26f5a3e9368 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Lock "dab11b88-ac23-43f0-9203-024faf41e1f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.325s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1975.522354] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.574s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1975.522903] env[62684]: DEBUG nova.compute.manager [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1975.525750] env[62684]: DEBUG oslo_concurrency.lockutils [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.017s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1975.527165] env[62684]: INFO nova.compute.claims [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1975.820518] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052994, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.845332] env[62684]: DEBUG nova.network.neutron [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Successfully updated port: 0a96e2ce-2335-44e2-940d-26d3afbafa3a {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1975.886094] env[62684]: DEBUG nova.compute.manager [req-2986eaa0-4aa7-42f9-94f3-43c39b85be5e req-6e2cbaab-0d9e-4a52-9188-32dd49e22c05 service nova] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Received event network-vif-plugged-0a96e2ce-2335-44e2-940d-26d3afbafa3a {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1975.886094] env[62684]: DEBUG oslo_concurrency.lockutils [req-2986eaa0-4aa7-42f9-94f3-43c39b85be5e req-6e2cbaab-0d9e-4a52-9188-32dd49e22c05 service nova] Acquiring lock "57537508-06e7-43a4-95c5-c4399b8bf93f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1975.886094] env[62684]: DEBUG oslo_concurrency.lockutils [req-2986eaa0-4aa7-42f9-94f3-43c39b85be5e req-6e2cbaab-0d9e-4a52-9188-32dd49e22c05 service nova] Lock "57537508-06e7-43a4-95c5-c4399b8bf93f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1975.886235] env[62684]: DEBUG oslo_concurrency.lockutils [req-2986eaa0-4aa7-42f9-94f3-43c39b85be5e req-6e2cbaab-0d9e-4a52-9188-32dd49e22c05 service nova] Lock "57537508-06e7-43a4-95c5-c4399b8bf93f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1975.886514] env[62684]: DEBUG nova.compute.manager [req-2986eaa0-4aa7-42f9-94f3-43c39b85be5e req-6e2cbaab-0d9e-4a52-9188-32dd49e22c05 service nova] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] No waiting events found dispatching network-vif-plugged-0a96e2ce-2335-44e2-940d-26d3afbafa3a {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1975.886697] env[62684]: WARNING nova.compute.manager [req-2986eaa0-4aa7-42f9-94f3-43c39b85be5e req-6e2cbaab-0d9e-4a52-9188-32dd49e22c05 service 
nova] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Received unexpected event network-vif-plugged-0a96e2ce-2335-44e2-940d-26d3afbafa3a for instance with vm_state building and task_state spawning. [ 1976.031999] env[62684]: DEBUG nova.compute.utils [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1976.035803] env[62684]: DEBUG nova.compute.manager [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1976.035977] env[62684]: DEBUG nova.network.neutron [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1976.143558] env[62684]: DEBUG nova.policy [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2fab3230b61d440e93d1d0a975115405', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '27d04006afc747e19ad87238bfdbaad1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1976.327936] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052994, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.347911] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "refresh_cache-57537508-06e7-43a4-95c5-c4399b8bf93f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1976.348179] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired lock "refresh_cache-57537508-06e7-43a4-95c5-c4399b8bf93f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1976.348366] env[62684]: DEBUG nova.network.neutron [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1976.362284] env[62684]: DEBUG nova.network.neutron [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Port 39c750a6-1076-4354-bc30-d7f50ca821b5 binding to destination host cpu-1 is already ACTIVE {{(pid=62684) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1976.362640] env[62684]: DEBUG oslo_concurrency.lockutils [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "refresh_cache-02dc8c41-5092-4f84-9722-37d4df3a459a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1976.362853] env[62684]: DEBUG oslo_concurrency.lockutils [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquired lock "refresh_cache-02dc8c41-5092-4f84-9722-37d4df3a459a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1976.363103] env[62684]: DEBUG nova.network.neutron [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1976.537145] env[62684]: DEBUG nova.compute.manager [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1976.718800] env[62684]: DEBUG nova.network.neutron [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Successfully created port: f5c06971-b96a-4fa0-858e-5e47100e2e68 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1976.827251] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052994, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.900055] env[62684]: DEBUG nova.network.neutron [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1976.988460] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f929aa-3e40-4979-a881-f5608b6ddf60 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.997085] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d2f10f4-d897-49a6-a527-cae1c4b89552 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.033851] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3637b28a-764c-4c86-960a-352f520d7cd7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.042258] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b49656-0cb8-403d-bb7b-bcafb029f473 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.059684] env[62684]: DEBUG nova.compute.provider_tree [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1977.104436] env[62684]: DEBUG nova.network.neutron [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Updating instance_info_cache with network_info: [{"id": "0a96e2ce-2335-44e2-940d-26d3afbafa3a", "address": "fa:16:3e:6d:b8:02", "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-120070593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "263c101fcc5e493789b79dfd1ba97cc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a96e2ce-23", "ovs_interfaceid": "0a96e2ce-2335-44e2-940d-26d3afbafa3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1977.320363] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052994, 'name': CreateVM_Task, 'duration_secs': 1.643656} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.320508] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1977.321426] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1977.321611] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1977.321946] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1977.322232] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-856ac224-3c08-4712-9858-c29bcb2aa265 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.327028] env[62684]: DEBUG oslo_vmware.api [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Waiting for the task: (returnval){ [ 1977.327028] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f22501-9319-bf8e-0a88-8a123f2fa25f" [ 1977.327028] env[62684]: _type = "Task" [ 1977.327028] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.338022] env[62684]: DEBUG oslo_vmware.api [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f22501-9319-bf8e-0a88-8a123f2fa25f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.552070] env[62684]: DEBUG nova.compute.manager [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1977.563275] env[62684]: DEBUG nova.scheduler.client.report [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1977.579544] env[62684]: DEBUG nova.virt.hardware [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1977.579864] env[62684]: DEBUG nova.virt.hardware [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1977.580112] env[62684]: DEBUG nova.virt.hardware [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1977.580371] env[62684]: DEBUG nova.virt.hardware [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1977.580583] env[62684]: DEBUG nova.virt.hardware [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
1977.580790] env[62684]: DEBUG nova.virt.hardware [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1977.581071] env[62684]: DEBUG nova.virt.hardware [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1977.581308] env[62684]: DEBUG nova.virt.hardware [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1977.581555] env[62684]: DEBUG nova.virt.hardware [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1977.581795] env[62684]: DEBUG nova.virt.hardware [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1977.582053] env[62684]: DEBUG nova.virt.hardware [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1977.582948] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d4715d8-0690-4906-81fa-6c1cb2eed27f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.592426] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4d96eae-9657-421b-a460-6477f4a17c4b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.606313] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Releasing lock "refresh_cache-57537508-06e7-43a4-95c5-c4399b8bf93f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1977.606692] env[62684]: DEBUG nova.compute.manager [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Instance network_info: |[{"id": "0a96e2ce-2335-44e2-940d-26d3afbafa3a", "address": "fa:16:3e:6d:b8:02", "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-120070593-network", "subnets": 
[{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "263c101fcc5e493789b79dfd1ba97cc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a96e2ce-23", "ovs_interfaceid": "0a96e2ce-2335-44e2-940d-26d3afbafa3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1977.607299] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:b8:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92e4d027-e755-417b-8eea-9a8f24b85140', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0a96e2ce-2335-44e2-940d-26d3afbafa3a', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1977.614781] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Creating folder: Project (263c101fcc5e493789b79dfd1ba97cc0). Parent ref: group-v421118. 
{{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1977.615163] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ef1452f0-7c0c-451f-bdde-78b0536a751f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.623626] env[62684]: DEBUG nova.network.neutron [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Updating instance_info_cache with network_info: [{"id": "39c750a6-1076-4354-bc30-d7f50ca821b5", "address": "fa:16:3e:16:fe:89", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.34", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39c750a6-10", "ovs_interfaceid": "39c750a6-1076-4354-bc30-d7f50ca821b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1977.625727] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Created folder: Project (263c101fcc5e493789b79dfd1ba97cc0) in parent group-v421118. [ 1977.625988] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Creating folder: Instances. Parent ref: group-v421294. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1977.626491] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f06db943-1910-4381-ac14-d2caa5fe9646 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.637577] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Created folder: Instances in parent group-v421294. [ 1977.637813] env[62684]: DEBUG oslo.service.loopingcall [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1977.638009] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1977.638217] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e3c47b09-f984-4293-be0b-6ee311f63f7c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.656957] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1977.656957] env[62684]: value = "task-2052997" [ 1977.656957] env[62684]: _type = "Task" [ 1977.656957] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.667673] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052997, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.837692] env[62684]: DEBUG oslo_vmware.api [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f22501-9319-bf8e-0a88-8a123f2fa25f, 'name': SearchDatastore_Task, 'duration_secs': 0.030436} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.838026] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1977.838286] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1977.838531] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1977.838728] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1977.838930] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1977.839201] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9065262c-8b0b-4edb-89ec-b0ce1078195c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.849744] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1977.849928] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1977.850638] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d68e3ef-db36-4248-a89f-9b69a4c408c9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.855525] env[62684]: DEBUG oslo_vmware.api [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Waiting for the task: (returnval){ [ 1977.855525] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5274b655-7660-e0b5-e233-97043bef9034" [ 1977.855525] env[62684]: _type = "Task" [ 1977.855525] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.862933] env[62684]: DEBUG oslo_vmware.api [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5274b655-7660-e0b5-e233-97043bef9034, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.069836] env[62684]: DEBUG oslo_concurrency.lockutils [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.544s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1978.070595] env[62684]: DEBUG nova.compute.manager [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1978.077051] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 31.426s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1978.077296] env[62684]: DEBUG nova.objects.instance [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62684) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1978.106918] env[62684]: DEBUG nova.compute.manager [req-9ce031b1-daca-48fb-9434-dcad47009c29 req-19c39374-0dd1-4010-92ab-201954289114 service nova] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Received event network-changed-6da08bf6-6c5b-41a3-90e2-d17b27a734e4 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1978.107689] env[62684]: DEBUG nova.compute.manager [req-9ce031b1-daca-48fb-9434-dcad47009c29 req-19c39374-0dd1-4010-92ab-201954289114 service nova] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Refreshing instance network info cache due to event network-changed-6da08bf6-6c5b-41a3-90e2-d17b27a734e4. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1978.107917] env[62684]: DEBUG oslo_concurrency.lockutils [req-9ce031b1-daca-48fb-9434-dcad47009c29 req-19c39374-0dd1-4010-92ab-201954289114 service nova] Acquiring lock "refresh_cache-dab11b88-ac23-43f0-9203-024faf41e1f5" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1978.108329] env[62684]: DEBUG oslo_concurrency.lockutils [req-9ce031b1-daca-48fb-9434-dcad47009c29 req-19c39374-0dd1-4010-92ab-201954289114 service nova] Acquired lock "refresh_cache-dab11b88-ac23-43f0-9203-024faf41e1f5" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1978.108396] env[62684]: DEBUG nova.network.neutron [req-9ce031b1-daca-48fb-9434-dcad47009c29 req-19c39374-0dd1-4010-92ab-201954289114 service nova] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Refreshing network info cache for port 6da08bf6-6c5b-41a3-90e2-d17b27a734e4 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1978.127674] env[62684]: DEBUG oslo_concurrency.lockutils [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Releasing lock "refresh_cache-02dc8c41-5092-4f84-9722-37d4df3a459a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1978.166694] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2052997, 'name': CreateVM_Task, 'duration_secs': 0.494095} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.166854] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1978.167566] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1978.167738] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1978.168080] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1978.168836] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65600cd4-23b6-49c4-a25c-007efcc333ce {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.173267] env[62684]: DEBUG oslo_vmware.api [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 1978.173267] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5279c052-85f2-4cc0-d238-702e40453585" [ 1978.173267] env[62684]: _type = "Task" [ 1978.173267] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1978.181242] env[62684]: DEBUG oslo_vmware.api [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5279c052-85f2-4cc0-d238-702e40453585, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.365413] env[62684]: DEBUG oslo_vmware.api [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5274b655-7660-e0b5-e233-97043bef9034, 'name': SearchDatastore_Task, 'duration_secs': 0.010351} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.366195] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de208031-5498-41fe-846b-374ad9544c66 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.371914] env[62684]: DEBUG oslo_vmware.api [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Waiting for the task: (returnval){ [ 1978.371914] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52fc715d-43fb-45ca-a0e8-f5fe83617925" [ 1978.371914] env[62684]: _type = "Task" [ 1978.371914] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1978.380044] env[62684]: DEBUG oslo_vmware.api [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52fc715d-43fb-45ca-a0e8-f5fe83617925, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.588018] env[62684]: DEBUG nova.compute.utils [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1978.590405] env[62684]: DEBUG nova.compute.manager [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1978.590405] env[62684]: DEBUG nova.network.neutron [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1978.630830] env[62684]: DEBUG nova.compute.manager [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62684) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 1978.632034] env[62684]: DEBUG oslo_concurrency.lockutils [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1978.663392] env[62684]: DEBUG nova.policy [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '15ae7a383f294208909e3763b5429340', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bd812751722143fabedfa986a2d98b59', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1978.685020] env[62684]: DEBUG oslo_vmware.api [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5279c052-85f2-4cc0-d238-702e40453585, 'name': SearchDatastore_Task, 'duration_secs': 0.009832} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.685020] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1978.685020] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1978.685020] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1978.833896] env[62684]: DEBUG nova.network.neutron [req-9ce031b1-daca-48fb-9434-dcad47009c29 req-19c39374-0dd1-4010-92ab-201954289114 service nova] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Updated VIF entry in instance network info cache for port 6da08bf6-6c5b-41a3-90e2-d17b27a734e4. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1978.834309] env[62684]: DEBUG nova.network.neutron [req-9ce031b1-daca-48fb-9434-dcad47009c29 req-19c39374-0dd1-4010-92ab-201954289114 service nova] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Updating instance_info_cache with network_info: [{"id": "6da08bf6-6c5b-41a3-90e2-d17b27a734e4", "address": "fa:16:3e:d2:cc:6c", "network": {"id": "899020b7-a29e-4a35-bf3c-f9aebda1208d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1035902693-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "feab568b5c9e41bfa2ca824d44bcc4e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06eaa4c9-dbc2-4d38-a844-7bf76e7b5a64", "external-id": "nsx-vlan-transportzone-804", "segmentation_id": 804, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6da08bf6-6c", "ovs_interfaceid": "6da08bf6-6c5b-41a3-90e2-d17b27a734e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1978.883060] env[62684]: DEBUG oslo_vmware.api [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52fc715d-43fb-45ca-a0e8-f5fe83617925, 'name': 
SearchDatastore_Task, 'duration_secs': 0.009763} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.883351] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1978.883616] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21/2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1978.883897] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1978.884104] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1978.884330] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-79cb54ed-9eb2-4e41-bab6-8aeab55d8dbc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.886376] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c830d08-8fa3-4ebb-a802-d6e22c5108b5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.895223] env[62684]: DEBUG oslo_vmware.api [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Waiting for the task: (returnval){ [ 1978.895223] env[62684]: value = "task-2052998" [ 1978.895223] env[62684]: _type = "Task" [ 1978.895223] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1978.900234] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1978.900528] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1978.901890] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3f305c0-96d4-49f2-9b60-d7788692d198 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.908573] env[62684]: DEBUG oslo_vmware.api [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2052998, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.913120] env[62684]: DEBUG oslo_vmware.api [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 1978.913120] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5229a241-0a63-8e20-e08c-ccefcbf9df4d" [ 1978.913120] env[62684]: _type = "Task" [ 1978.913120] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1978.920422] env[62684]: DEBUG oslo_vmware.api [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5229a241-0a63-8e20-e08c-ccefcbf9df4d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.091743] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2263f1fe-32ff-45c9-b654-3474591ac4bf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1979.093123] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.171s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1979.095390] env[62684]: INFO nova.compute.claims [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1979.099406] env[62684]: DEBUG nova.compute.manager [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1979.337389] env[62684]: DEBUG oslo_concurrency.lockutils [req-9ce031b1-daca-48fb-9434-dcad47009c29 req-19c39374-0dd1-4010-92ab-201954289114 service nova] Releasing lock "refresh_cache-dab11b88-ac23-43f0-9203-024faf41e1f5" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1979.338184] env[62684]: DEBUG nova.compute.manager [req-9ce031b1-daca-48fb-9434-dcad47009c29 req-19c39374-0dd1-4010-92ab-201954289114 service nova] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Received event network-changed-0a96e2ce-2335-44e2-940d-26d3afbafa3a {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1979.338535] env[62684]: DEBUG nova.compute.manager [req-9ce031b1-daca-48fb-9434-dcad47009c29 req-19c39374-0dd1-4010-92ab-201954289114 service nova] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Refreshing instance network info cache due to event network-changed-0a96e2ce-2335-44e2-940d-26d3afbafa3a. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1979.340231] env[62684]: DEBUG oslo_concurrency.lockutils [req-9ce031b1-daca-48fb-9434-dcad47009c29 req-19c39374-0dd1-4010-92ab-201954289114 service nova] Acquiring lock "refresh_cache-57537508-06e7-43a4-95c5-c4399b8bf93f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1979.340231] env[62684]: DEBUG oslo_concurrency.lockutils [req-9ce031b1-daca-48fb-9434-dcad47009c29 req-19c39374-0dd1-4010-92ab-201954289114 service nova] Acquired lock "refresh_cache-57537508-06e7-43a4-95c5-c4399b8bf93f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1979.340231] env[62684]: DEBUG nova.network.neutron [req-9ce031b1-daca-48fb-9434-dcad47009c29 req-19c39374-0dd1-4010-92ab-201954289114 service nova] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Refreshing network info cache for port 0a96e2ce-2335-44e2-940d-26d3afbafa3a {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1979.410540] env[62684]: DEBUG oslo_vmware.api [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2052998, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.446151} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1979.410801] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21/2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1979.411254] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1979.411334] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1c3c1fe0-ff4a-4010-9135-0437dd803665 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.421592] env[62684]: DEBUG oslo_vmware.api [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5229a241-0a63-8e20-e08c-ccefcbf9df4d, 'name': SearchDatastore_Task, 'duration_secs': 0.018448} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1979.423374] env[62684]: DEBUG oslo_vmware.api [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Waiting for the task: (returnval){ [ 1979.423374] env[62684]: value = "task-2052999" [ 1979.423374] env[62684]: _type = "Task" [ 1979.423374] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.423622] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ebbe47b-83a9-4b85-8231-814cf54607ce {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.431644] env[62684]: DEBUG oslo_vmware.api [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 1979.431644] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529bafd4-bfa2-d8df-2901-2dc732435689" [ 1979.431644] env[62684]: _type = "Task" [ 1979.431644] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.434934] env[62684]: DEBUG oslo_vmware.api [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2052999, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.442404] env[62684]: DEBUG oslo_vmware.api [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529bafd4-bfa2-d8df-2901-2dc732435689, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.494024] env[62684]: DEBUG nova.network.neutron [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Successfully updated port: f5c06971-b96a-4fa0-858e-5e47100e2e68 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1979.510282] env[62684]: DEBUG nova.network.neutron [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Successfully created port: 3fcb3920-5e10-45e2-865d-cc9b89a1e335 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1979.936765] env[62684]: DEBUG oslo_vmware.api [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2052999, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064572} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1979.940170] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1979.940922] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09025282-f504-463d-9671-20b001dae2fc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.948921] env[62684]: DEBUG oslo_vmware.api [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529bafd4-bfa2-d8df-2901-2dc732435689, 'name': SearchDatastore_Task, 'duration_secs': 0.009968} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1979.964937] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1979.965263] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 57537508-06e7-43a4-95c5-c4399b8bf93f/57537508-06e7-43a4-95c5-c4399b8bf93f.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1979.974493] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21/2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1979.974776] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-53ab3192-4442-489d-ba44-fbf49ce6e5d2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.976999] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-45fea0e0-9fc6-48aa-b68b-9297725f3863 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.997500] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "refresh_cache-0156d807-1ab4-482f-91d1-172bf32bf23c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1979.997616] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquired lock "refresh_cache-0156d807-1ab4-482f-91d1-172bf32bf23c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1979.997797] env[62684]: DEBUG nova.network.neutron [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1979.999229] env[62684]: DEBUG oslo_vmware.api [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 1979.999229] env[62684]: value = "task-2053000" [ 1979.999229] 
env[62684]: _type = "Task" [ 1979.999229] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1980.001161] env[62684]: DEBUG oslo_vmware.api [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Waiting for the task: (returnval){ [ 1980.001161] env[62684]: value = "task-2053001" [ 1980.001161] env[62684]: _type = "Task" [ 1980.001161] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1980.019790] env[62684]: DEBUG oslo_vmware.api [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2053001, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.020208] env[62684]: DEBUG oslo_vmware.api [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053000, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.118723] env[62684]: DEBUG nova.compute.manager [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1980.148336] env[62684]: DEBUG nova.virt.hardware [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1980.148618] env[62684]: DEBUG nova.virt.hardware [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1980.148801] env[62684]: DEBUG nova.virt.hardware [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1980.149029] env[62684]: DEBUG nova.virt.hardware [None 
req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1980.149200] env[62684]: DEBUG nova.virt.hardware [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1980.149394] env[62684]: DEBUG nova.virt.hardware [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1980.149669] env[62684]: DEBUG nova.virt.hardware [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1980.149799] env[62684]: DEBUG nova.virt.hardware [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1980.149990] env[62684]: DEBUG nova.virt.hardware [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1980.150198] env[62684]: DEBUG nova.virt.hardware [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1980.150396] env[62684]: DEBUG nova.virt.hardware [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1980.151338] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f7bfee3-e690-44cd-9244-240847e40bf9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.164654] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df56aeba-6a39-44fe-8e0a-179cdb48ebe9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.295537] env[62684]: DEBUG nova.network.neutron [req-9ce031b1-daca-48fb-9434-dcad47009c29 req-19c39374-0dd1-4010-92ab-201954289114 service nova] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Updated VIF entry in 
instance network info cache for port 0a96e2ce-2335-44e2-940d-26d3afbafa3a. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1980.296008] env[62684]: DEBUG nova.network.neutron [req-9ce031b1-daca-48fb-9434-dcad47009c29 req-19c39374-0dd1-4010-92ab-201954289114 service nova] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Updating instance_info_cache with network_info: [{"id": "0a96e2ce-2335-44e2-940d-26d3afbafa3a", "address": "fa:16:3e:6d:b8:02", "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-120070593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "263c101fcc5e493789b79dfd1ba97cc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a96e2ce-23", "ovs_interfaceid": "0a96e2ce-2335-44e2-940d-26d3afbafa3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1980.316806] env[62684]: DEBUG nova.compute.manager [req-b3fc006b-cd2a-4e22-9dab-f8f7bbce98aa req-babfcdb1-b85d-4566-8551-03e0df812c6b service nova] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Received event network-vif-plugged-f5c06971-b96a-4fa0-858e-5e47100e2e68 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1980.317048] env[62684]: DEBUG oslo_concurrency.lockutils [req-b3fc006b-cd2a-4e22-9dab-f8f7bbce98aa req-babfcdb1-b85d-4566-8551-03e0df812c6b service nova] Acquiring lock "0156d807-1ab4-482f-91d1-172bf32bf23c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1980.317266] env[62684]: DEBUG oslo_concurrency.lockutils [req-b3fc006b-cd2a-4e22-9dab-f8f7bbce98aa req-babfcdb1-b85d-4566-8551-03e0df812c6b service nova] Lock "0156d807-1ab4-482f-91d1-172bf32bf23c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1980.317463] env[62684]: DEBUG oslo_concurrency.lockutils [req-b3fc006b-cd2a-4e22-9dab-f8f7bbce98aa req-babfcdb1-b85d-4566-8551-03e0df812c6b service nova] Lock "0156d807-1ab4-482f-91d1-172bf32bf23c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1980.317661] env[62684]: DEBUG nova.compute.manager [req-b3fc006b-cd2a-4e22-9dab-f8f7bbce98aa req-babfcdb1-b85d-4566-8551-03e0df812c6b service nova] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] No waiting events found dispatching 
network-vif-plugged-f5c06971-b96a-4fa0-858e-5e47100e2e68 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1980.317826] env[62684]: WARNING nova.compute.manager [req-b3fc006b-cd2a-4e22-9dab-f8f7bbce98aa req-babfcdb1-b85d-4566-8551-03e0df812c6b service nova] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Received unexpected event network-vif-plugged-f5c06971-b96a-4fa0-858e-5e47100e2e68 for instance with vm_state building and task_state spawning. [ 1980.317987] env[62684]: DEBUG nova.compute.manager [req-b3fc006b-cd2a-4e22-9dab-f8f7bbce98aa req-babfcdb1-b85d-4566-8551-03e0df812c6b service nova] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Received event network-changed-f5c06971-b96a-4fa0-858e-5e47100e2e68 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1980.318225] env[62684]: DEBUG nova.compute.manager [req-b3fc006b-cd2a-4e22-9dab-f8f7bbce98aa req-babfcdb1-b85d-4566-8551-03e0df812c6b service nova] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Refreshing instance network info cache due to event network-changed-f5c06971-b96a-4fa0-858e-5e47100e2e68. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1980.318401] env[62684]: DEBUG oslo_concurrency.lockutils [req-b3fc006b-cd2a-4e22-9dab-f8f7bbce98aa req-babfcdb1-b85d-4566-8551-03e0df812c6b service nova] Acquiring lock "refresh_cache-0156d807-1ab4-482f-91d1-172bf32bf23c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1980.523128] env[62684]: DEBUG oslo_vmware.api [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053000, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.526759] env[62684]: DEBUG oslo_vmware.api [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2053001, 'name': ReconfigVM_Task, 'duration_secs': 0.273076} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1980.527469] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Reconfigured VM instance instance-0000003c to attach disk [datastore1] 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21/2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1980.528670] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3acae7db-8769-45cb-87be-06e73de2eee0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.540155] env[62684]: DEBUG oslo_vmware.api [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Waiting for the task: (returnval){ [ 1980.540155] env[62684]: value = "task-2053002" [ 1980.540155] env[62684]: _type = "Task" [ 1980.540155] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1980.559342] env[62684]: DEBUG oslo_vmware.api [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2053002, 'name': Rename_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.560818] env[62684]: DEBUG nova.network.neutron [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1980.640611] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fdd3673-aa75-495e-a998-e84c6e67cc8a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.653016] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2be78917-dcd8-4918-ad1e-d366025053f3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.682349] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-236d4270-babb-497a-8a6c-8ce1150b613a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.689886] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bdf78ee-77d6-4911-a9ce-a26e8c16e686 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.703659] env[62684]: DEBUG nova.compute.provider_tree [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1980.799095] env[62684]: DEBUG oslo_concurrency.lockutils [req-9ce031b1-daca-48fb-9434-dcad47009c29 req-19c39374-0dd1-4010-92ab-201954289114 service nova] Releasing lock "refresh_cache-57537508-06e7-43a4-95c5-c4399b8bf93f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1980.799421] env[62684]: DEBUG nova.compute.manager [req-9ce031b1-daca-48fb-9434-dcad47009c29 req-19c39374-0dd1-4010-92ab-201954289114 service nova] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Received event network-changed-6da08bf6-6c5b-41a3-90e2-d17b27a734e4 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1980.799643] env[62684]: DEBUG nova.compute.manager [req-9ce031b1-daca-48fb-9434-dcad47009c29 req-19c39374-0dd1-4010-92ab-201954289114 service nova] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Refreshing instance network 
info cache due to event network-changed-6da08bf6-6c5b-41a3-90e2-d17b27a734e4. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1980.799816] env[62684]: DEBUG oslo_concurrency.lockutils [req-9ce031b1-daca-48fb-9434-dcad47009c29 req-19c39374-0dd1-4010-92ab-201954289114 service nova] Acquiring lock "refresh_cache-dab11b88-ac23-43f0-9203-024faf41e1f5" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1980.799976] env[62684]: DEBUG oslo_concurrency.lockutils [req-9ce031b1-daca-48fb-9434-dcad47009c29 req-19c39374-0dd1-4010-92ab-201954289114 service nova] Acquired lock "refresh_cache-dab11b88-ac23-43f0-9203-024faf41e1f5" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1980.800356] env[62684]: DEBUG nova.network.neutron [req-9ce031b1-daca-48fb-9434-dcad47009c29 req-19c39374-0dd1-4010-92ab-201954289114 service nova] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Refreshing network info cache for port 6da08bf6-6c5b-41a3-90e2-d17b27a734e4 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1980.818124] env[62684]: DEBUG nova.network.neutron [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Updating instance_info_cache with network_info: [{"id": "f5c06971-b96a-4fa0-858e-5e47100e2e68", "address": "fa:16:3e:9e:fc:9d", "network": {"id": "e177c6d0-ddd5-4029-94af-c8f1b937dd9f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1344612161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27d04006afc747e19ad87238bfdbaad1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5c06971-b9", "ovs_interfaceid": "f5c06971-b96a-4fa0-858e-5e47100e2e68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1981.016611] env[62684]: DEBUG oslo_vmware.api [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053000, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.63287} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1981.016611] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 57537508-06e7-43a4-95c5-c4399b8bf93f/57537508-06e7-43a4-95c5-c4399b8bf93f.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1981.016611] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1981.016611] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eb2980fc-dece-4e89-90ce-b2b31402d8f5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.023348] env[62684]: DEBUG oslo_vmware.api [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 1981.023348] env[62684]: value = "task-2053003" [ 1981.023348] env[62684]: _type = "Task" [ 1981.023348] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1981.032527] env[62684]: DEBUG oslo_vmware.api [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053003, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.050248] env[62684]: DEBUG oslo_vmware.api [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2053002, 'name': Rename_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.225660] env[62684]: ERROR nova.scheduler.client.report [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [req-b047283e-e089-4c7b-b230-5dffe4a816f4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b047283e-e089-4c7b-b230-5dffe4a816f4"}]} [ 1981.243530] env[62684]: DEBUG nova.scheduler.client.report [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1981.260332] env[62684]: DEBUG nova.scheduler.client.report [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1981.260578] env[62684]: DEBUG nova.compute.provider_tree [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1981.275286] env[62684]: DEBUG nova.scheduler.client.report [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1981.296242] env[62684]: DEBUG nova.scheduler.client.report [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1981.322387] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Releasing lock "refresh_cache-0156d807-1ab4-482f-91d1-172bf32bf23c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1981.322720] env[62684]: DEBUG nova.compute.manager [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 
tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Instance network_info: |[{"id": "f5c06971-b96a-4fa0-858e-5e47100e2e68", "address": "fa:16:3e:9e:fc:9d", "network": {"id": "e177c6d0-ddd5-4029-94af-c8f1b937dd9f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1344612161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27d04006afc747e19ad87238bfdbaad1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5c06971-b9", "ovs_interfaceid": "f5c06971-b96a-4fa0-858e-5e47100e2e68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1981.323872] env[62684]: DEBUG oslo_concurrency.lockutils [req-b3fc006b-cd2a-4e22-9dab-f8f7bbce98aa req-babfcdb1-b85d-4566-8551-03e0df812c6b service nova] Acquired lock "refresh_cache-0156d807-1ab4-482f-91d1-172bf32bf23c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1981.323872] env[62684]: DEBUG nova.network.neutron [req-b3fc006b-cd2a-4e22-9dab-f8f7bbce98aa req-babfcdb1-b85d-4566-8551-03e0df812c6b service nova] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Refreshing network info cache for port f5c06971-b96a-4fa0-858e-5e47100e2e68 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1981.324597] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:fc:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '171aeae0-6a27-44fc-bc3d-a2d5581fc702', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f5c06971-b96a-4fa0-858e-5e47100e2e68', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1981.332327] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Creating folder: Project (27d04006afc747e19ad87238bfdbaad1). Parent ref: group-v421118. 
{{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1981.332794] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c5b28079-6380-425a-9cbc-5ec1f0777f6d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.344129] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Created folder: Project (27d04006afc747e19ad87238bfdbaad1) in parent group-v421118. [ 1981.344422] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Creating folder: Instances. Parent ref: group-v421297. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1981.344674] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12c050a0-ac99-4a5d-b7dd-c26c924b3f06 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.354560] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Created folder: Instances in parent group-v421297. [ 1981.354793] env[62684]: DEBUG oslo.service.loopingcall [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1981.355229] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1981.355441] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0bb8022e-a042-4f34-bff4-94b4c26fb6bc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.386060] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1981.386060] env[62684]: value = "task-2053006" [ 1981.386060] env[62684]: _type = "Task" [ 1981.386060] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1981.393961] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053006, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.533966] env[62684]: DEBUG oslo_vmware.api [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053003, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076718} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1981.536554] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1981.537540] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8285343c-b5f4-4f4a-8fda-bddc1311ba48 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.563821] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] 57537508-06e7-43a4-95c5-c4399b8bf93f/57537508-06e7-43a4-95c5-c4399b8bf93f.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1981.567264] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6285f492-5e31-4e05-9293-7b2e18e614e4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.588138] env[62684]: DEBUG oslo_vmware.api [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2053002, 'name': Rename_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.592981] env[62684]: DEBUG oslo_vmware.api [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 1981.592981] env[62684]: value = "task-2053007" [ 1981.592981] env[62684]: _type = "Task" [ 1981.592981] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1981.603382] env[62684]: DEBUG oslo_vmware.api [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053007, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.784693] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8852e184-73d5-4139-8535-6141a8503aa5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.793243] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37677879-7603-4b93-b385-b20a07c72e6d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.833282] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131088c9-af3f-413a-992c-834eedaccba2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.846988] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0890e5a-83e7-498e-862f-d4274e4e631d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.865848] env[62684]: DEBUG nova.compute.provider_tree [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1981.869305] env[62684]: DEBUG nova.network.neutron [req-9ce031b1-daca-48fb-9434-dcad47009c29 req-19c39374-0dd1-4010-92ab-201954289114 service nova] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Updated VIF entry in instance network info cache for port 6da08bf6-6c5b-41a3-90e2-d17b27a734e4. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1981.869305] env[62684]: DEBUG nova.network.neutron [req-9ce031b1-daca-48fb-9434-dcad47009c29 req-19c39374-0dd1-4010-92ab-201954289114 service nova] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Updating instance_info_cache with network_info: [{"id": "6da08bf6-6c5b-41a3-90e2-d17b27a734e4", "address": "fa:16:3e:d2:cc:6c", "network": {"id": "899020b7-a29e-4a35-bf3c-f9aebda1208d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1035902693-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "feab568b5c9e41bfa2ca824d44bcc4e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06eaa4c9-dbc2-4d38-a844-7bf76e7b5a64", "external-id": "nsx-vlan-transportzone-804", "segmentation_id": 804, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6da08bf6-6c", "ovs_interfaceid": "6da08bf6-6c5b-41a3-90e2-d17b27a734e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1981.901469] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053006, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.984307] env[62684]: DEBUG nova.network.neutron [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Successfully updated port: 3fcb3920-5e10-45e2-865d-cc9b89a1e335 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1982.055491] env[62684]: DEBUG oslo_vmware.api [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2053002, 'name': Rename_Task, 'duration_secs': 1.164612} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1982.055843] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1982.056143] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-59b05e26-14c0-42be-aa29-fadc11235ecf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.064031] env[62684]: DEBUG oslo_vmware.api [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Waiting for the task: (returnval){ [ 1982.064031] env[62684]: value = "task-2053008" [ 1982.064031] env[62684]: _type = "Task" [ 1982.064031] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1982.078751] env[62684]: DEBUG oslo_vmware.api [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2053008, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.103814] env[62684]: DEBUG oslo_vmware.api [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053007, 'name': ReconfigVM_Task, 'duration_secs': 0.485649} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1982.104304] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Reconfigured VM instance instance-0000003d to attach disk [datastore1] 57537508-06e7-43a4-95c5-c4399b8bf93f/57537508-06e7-43a4-95c5-c4399b8bf93f.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1982.105258] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5f1ce4fb-f7f0-4cd0-a6a6-95d34bf84bba {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.111916] env[62684]: DEBUG oslo_vmware.api [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 1982.111916] env[62684]: value = "task-2053009" [ 1982.111916] env[62684]: _type = "Task" [ 1982.111916] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1982.121123] env[62684]: DEBUG oslo_vmware.api [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053009, 'name': Rename_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.121944] env[62684]: DEBUG nova.network.neutron [req-b3fc006b-cd2a-4e22-9dab-f8f7bbce98aa req-babfcdb1-b85d-4566-8551-03e0df812c6b service nova] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Updated VIF entry in instance network info cache for port f5c06971-b96a-4fa0-858e-5e47100e2e68. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1982.123283] env[62684]: DEBUG nova.network.neutron [req-b3fc006b-cd2a-4e22-9dab-f8f7bbce98aa req-babfcdb1-b85d-4566-8551-03e0df812c6b service nova] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Updating instance_info_cache with network_info: [{"id": "f5c06971-b96a-4fa0-858e-5e47100e2e68", "address": "fa:16:3e:9e:fc:9d", "network": {"id": "e177c6d0-ddd5-4029-94af-c8f1b937dd9f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1344612161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27d04006afc747e19ad87238bfdbaad1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5c06971-b9", "ovs_interfaceid": "f5c06971-b96a-4fa0-858e-5e47100e2e68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1982.345820] env[62684]: DEBUG nova.compute.manager [req-84a0234c-4152-417a-b349-e7b569099434 req-f0e96b2b-ffc6-4a18-8846-68bbb069b482 service nova] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Received event network-vif-plugged-3fcb3920-5e10-45e2-865d-cc9b89a1e335 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1982.346045] env[62684]: DEBUG oslo_concurrency.lockutils [req-84a0234c-4152-417a-b349-e7b569099434 req-f0e96b2b-ffc6-4a18-8846-68bbb069b482 service nova] Acquiring lock "2baabe7a-ed33-4cef-9acc-a7b804610b0a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1982.346323] env[62684]: DEBUG oslo_concurrency.lockutils [req-84a0234c-4152-417a-b349-e7b569099434 req-f0e96b2b-ffc6-4a18-8846-68bbb069b482 service nova] Lock "2baabe7a-ed33-4cef-9acc-a7b804610b0a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1982.346523] env[62684]: DEBUG oslo_concurrency.lockutils [req-84a0234c-4152-417a-b349-e7b569099434 req-f0e96b2b-ffc6-4a18-8846-68bbb069b482 service nova] Lock "2baabe7a-ed33-4cef-9acc-a7b804610b0a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1982.346708] env[62684]: DEBUG nova.compute.manager [req-84a0234c-4152-417a-b349-e7b569099434 req-f0e96b2b-ffc6-4a18-8846-68bbb069b482 service nova] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] No waiting events found dispatching network-vif-plugged-3fcb3920-5e10-45e2-865d-cc9b89a1e335 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1982.346878] env[62684]: WARNING nova.compute.manager [req-84a0234c-4152-417a-b349-e7b569099434 req-f0e96b2b-ffc6-4a18-8846-68bbb069b482 service nova] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Received unexpected event network-vif-plugged-3fcb3920-5e10-45e2-865d-cc9b89a1e335 for instance with vm_state building and task_state spawning. [ 1982.347057] env[62684]: DEBUG nova.compute.manager [req-84a0234c-4152-417a-b349-e7b569099434 req-f0e96b2b-ffc6-4a18-8846-68bbb069b482 service nova] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Received event network-changed-3fcb3920-5e10-45e2-865d-cc9b89a1e335 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1982.347221] env[62684]: DEBUG nova.compute.manager [req-84a0234c-4152-417a-b349-e7b569099434 req-f0e96b2b-ffc6-4a18-8846-68bbb069b482 service nova] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Refreshing instance network info cache due to event network-changed-3fcb3920-5e10-45e2-865d-cc9b89a1e335. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1982.347440] env[62684]: DEBUG oslo_concurrency.lockutils [req-84a0234c-4152-417a-b349-e7b569099434 req-f0e96b2b-ffc6-4a18-8846-68bbb069b482 service nova] Acquiring lock "refresh_cache-2baabe7a-ed33-4cef-9acc-a7b804610b0a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1982.347597] env[62684]: DEBUG oslo_concurrency.lockutils [req-84a0234c-4152-417a-b349-e7b569099434 req-f0e96b2b-ffc6-4a18-8846-68bbb069b482 service nova] Acquired lock "refresh_cache-2baabe7a-ed33-4cef-9acc-a7b804610b0a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1982.347759] env[62684]: DEBUG nova.network.neutron [req-84a0234c-4152-417a-b349-e7b569099434 req-f0e96b2b-ffc6-4a18-8846-68bbb069b482 service nova] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Refreshing network info cache for port 3fcb3920-5e10-45e2-865d-cc9b89a1e335 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1982.367344] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Acquiring lock "7b29207a-7fa8-4374-819e-c046b2014969" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1982.367589] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Lock "7b29207a-7fa8-4374-819e-c046b2014969" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1982.372937] env[62684]: DEBUG 
oslo_concurrency.lockutils [req-9ce031b1-daca-48fb-9434-dcad47009c29 req-19c39374-0dd1-4010-92ab-201954289114 service nova] Releasing lock "refresh_cache-dab11b88-ac23-43f0-9203-024faf41e1f5" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1982.392563] env[62684]: ERROR nova.scheduler.client.report [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [req-4e058c84-73c9-4506-ad18-d31a51f82dd7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4e058c84-73c9-4506-ad18-d31a51f82dd7"}]} [ 1982.399048] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053006, 'name': CreateVM_Task, 'duration_secs': 0.604841} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1982.399048] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1982.399277] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1982.399322] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1982.399612] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1982.400320] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa0a092b-80d4-4a8a-afae-71213c42aaeb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.405190] env[62684]: DEBUG oslo_vmware.api [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 1982.405190] env[62684]: value = 
"session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c863e8-4dd0-9078-b75c-f05603e713fa" [ 1982.405190] env[62684]: _type = "Task" [ 1982.405190] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1982.410659] env[62684]: DEBUG nova.scheduler.client.report [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1982.418593] env[62684]: DEBUG oslo_vmware.api [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c863e8-4dd0-9078-b75c-f05603e713fa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.427472] env[62684]: DEBUG nova.scheduler.client.report [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1982.427715] env[62684]: DEBUG nova.compute.provider_tree [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1982.440818] env[62684]: DEBUG nova.scheduler.client.report [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1982.460040] env[62684]: DEBUG nova.scheduler.client.report [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1982.489543] env[62684]: DEBUG oslo_concurrency.lockutils [None 
req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "refresh_cache-2baabe7a-ed33-4cef-9acc-a7b804610b0a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1982.576695] env[62684]: DEBUG oslo_vmware.api [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2053008, 'name': PowerOnVM_Task, 'duration_secs': 0.470163} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1982.576978] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1982.577250] env[62684]: INFO nova.compute.manager [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Took 16.42 seconds to spawn the instance on the hypervisor. [ 1982.577449] env[62684]: DEBUG nova.compute.manager [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1982.578276] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91d8d203-00c9-435d-8007-ecde51893e33 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.624324] env[62684]: DEBUG oslo_vmware.api [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053009, 'name': Rename_Task, 'duration_secs': 0.152578} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1982.625617] env[62684]: DEBUG oslo_concurrency.lockutils [req-b3fc006b-cd2a-4e22-9dab-f8f7bbce98aa req-babfcdb1-b85d-4566-8551-03e0df812c6b service nova] Releasing lock "refresh_cache-0156d807-1ab4-482f-91d1-172bf32bf23c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1982.626357] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1982.626515] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ab328af8-3645-4c0b-9192-82ea1a3f0632 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.636244] env[62684]: DEBUG oslo_vmware.api [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 1982.636244] env[62684]: value = "task-2053010" [ 1982.636244] env[62684]: _type = "Task" [ 1982.636244] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1982.646287] env[62684]: DEBUG oslo_vmware.api [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053010, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.864632] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf27b8a5-150e-48b5-a428-d0c1a6c3c0f0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.869450] env[62684]: DEBUG nova.compute.manager [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1982.875161] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f27a9fe-217f-42cb-88d8-2ae45cb7e54e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.911377] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7657edc2-8833-433a-9f6a-b66e499903a2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.914503] env[62684]: DEBUG nova.network.neutron [req-84a0234c-4152-417a-b349-e7b569099434 req-f0e96b2b-ffc6-4a18-8846-68bbb069b482 service nova] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1982.923550] env[62684]: DEBUG oslo_vmware.api [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c863e8-4dd0-9078-b75c-f05603e713fa, 'name': SearchDatastore_Task, 'duration_secs': 0.011523} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1982.925474] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1982.925744] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1982.925981] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1982.926157] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1982.926330] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1982.926661] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8061176b-6dea-46f3-a3e4-d347fd9ba28c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.929294] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3108c6c5-fa15-48d6-93e8-8c13648f9f5d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.943933] env[62684]: DEBUG nova.compute.provider_tree [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1982.946962] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1982.947164] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1982.947877] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-664943c9-49db-4712-b9f2-254353b03500 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.953485] env[62684]: DEBUG oslo_vmware.api [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 1982.953485] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526688dd-28ec-2965-c8df-f5ce2c9d2b5f" [ 1982.953485] env[62684]: _type = "Task" [ 1982.953485] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1982.961539] env[62684]: DEBUG oslo_vmware.api [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526688dd-28ec-2965-c8df-f5ce2c9d2b5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.013461] env[62684]: DEBUG nova.network.neutron [req-84a0234c-4152-417a-b349-e7b569099434 req-f0e96b2b-ffc6-4a18-8846-68bbb069b482 service nova] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1983.102391] env[62684]: INFO nova.compute.manager [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Took 55.01 seconds to build instance. [ 1983.148347] env[62684]: DEBUG oslo_vmware.api [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053010, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.393829] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1983.469022] env[62684]: DEBUG oslo_vmware.api [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526688dd-28ec-2965-c8df-f5ce2c9d2b5f, 'name': SearchDatastore_Task, 'duration_secs': 0.010265} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1983.471844] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df432cfe-c7cf-4b92-87b9-85dd00678bde {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.476549] env[62684]: DEBUG oslo_vmware.api [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 1983.476549] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52913b8c-75cd-f01c-89b3-e0f5829210b5" [ 1983.476549] env[62684]: _type = "Task" [ 1983.476549] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.484596] env[62684]: DEBUG oslo_vmware.api [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52913b8c-75cd-f01c-89b3-e0f5829210b5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.486802] env[62684]: DEBUG nova.scheduler.client.report [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 93 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1983.487105] env[62684]: DEBUG nova.compute.provider_tree [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 93 to 94 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1983.487305] env[62684]: DEBUG nova.compute.provider_tree [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1983.515904] env[62684]: DEBUG oslo_concurrency.lockutils [req-84a0234c-4152-417a-b349-e7b569099434 req-f0e96b2b-ffc6-4a18-8846-68bbb069b482 service nova] Releasing lock "refresh_cache-2baabe7a-ed33-4cef-9acc-a7b804610b0a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1983.516230] env[62684]: DEBUG oslo_concurrency.lockutils [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquired lock "refresh_cache-2baabe7a-ed33-4cef-9acc-a7b804610b0a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1983.516447] env[62684]: DEBUG nova.network.neutron [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1983.604210] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3b39e634-949a-48d5-a863-2dbb1ceb5045 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Lock "2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.626s {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1983.646943] env[62684]: DEBUG oslo_vmware.api [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053010, 'name': PowerOnVM_Task, 'duration_secs': 0.537112} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1983.647233] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1983.647518] env[62684]: INFO nova.compute.manager [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Took 8.66 seconds to spawn the instance on the hypervisor. [ 1983.647744] env[62684]: DEBUG nova.compute.manager [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1983.648535] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e87bbba0-2b40-40f0-8e79-c7a87dbf1bd3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.877157] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Acquiring lock "2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1983.877487] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Lock "2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1983.877732] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Acquiring lock "2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1983.877956] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Lock "2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1983.878171] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Lock "2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1983.880695] env[62684]: INFO nova.compute.manager [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Terminating instance [ 1983.882374] env[62684]: DEBUG nova.compute.manager [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1983.882571] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1983.883422] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca4613d8-bf75-4c96-ab4a-a9d9e9f79a53 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.890939] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1983.891179] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e012bb47-7374-4566-b1db-50cddce5a718 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.897866] env[62684]: DEBUG oslo_vmware.api [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Waiting for the task: (returnval){ [ 1983.897866] env[62684]: value = "task-2053011" [ 1983.897866] env[62684]: _type = "Task" [ 1983.897866] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.905373] env[62684]: DEBUG oslo_vmware.api [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2053011, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.987149] env[62684]: DEBUG oslo_vmware.api [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52913b8c-75cd-f01c-89b3-e0f5829210b5, 'name': SearchDatastore_Task, 'duration_secs': 0.025126} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1983.987437] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1983.987726] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 0156d807-1ab4-482f-91d1-172bf32bf23c/0156d807-1ab4-482f-91d1-172bf32bf23c.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1983.988028] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9b0c8ae1-6c3a-4ecc-8b7b-cb7281623048 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.992058] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.899s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1983.992598] env[62684]: DEBUG nova.compute.manager [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1983.996510] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 26.656s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1983.996622] env[62684]: DEBUG nova.objects.instance [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62684) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1983.999205] env[62684]: DEBUG oslo_vmware.api [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 1983.999205] env[62684]: value = "task-2053012" [ 1983.999205] env[62684]: _type = "Task" [ 1983.999205] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.008084] env[62684]: DEBUG oslo_vmware.api [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053012, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.064223] env[62684]: DEBUG nova.network.neutron [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1984.169507] env[62684]: INFO nova.compute.manager [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Took 46.11 seconds to build instance. 
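The SearchDatastore_Task, CopyVirtualDisk_Task and PowerOnVM_Task entries above all follow the same shape: the driver submits a vCenter task, logs the returned handle inside a "Waiting for the task: (returnval){ ... }" block, then polls it ("progress is 0%" ... "progress is 100%") until it records "completed successfully" with a duration. The snippet below is only an illustrative sketch of that poll loop under assumed names; get_task_info, the TASK_* states and TaskFailed are hypothetical stand-ins, not the oslo.vmware API that produced these lines.

    import time

    # Hypothetical task states; the real vCenter task object carries
    # equivalent fields (state, progress, error).
    TASK_RUNNING, TASK_SUCCESS, TASK_ERROR = "running", "success", "error"

    class TaskFailed(Exception):
        """Raised when the remote task finishes in an error state."""

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
        """Poll a remote task until it completes, mirroring the log pattern
        'progress is 0%' ... 'progress is 100%' ... 'completed successfully'.

        get_task_info: callable returning (state, progress, error_message);
        a stand-in for the per-poll lookups seen between the progress lines.
        """
        deadline = time.monotonic() + timeout
        while True:
            state, progress, error = get_task_info()
            print(f"progress is {progress}%")   # corresponds to the DEBUG progress entries
            if state == TASK_SUCCESS:
                print("completed successfully")
                return
            if state == TASK_ERROR:
                raise TaskFailed(error or "task failed")
            if time.monotonic() > deadline:
                raise TimeoutError("gave up waiting for task")
            time.sleep(poll_interval)           # analogous to the driver's task poll interval

In the log itself the equivalent loop appears to re-read task state on a fixed interval via the PropertyCollector.RetrievePropertiesEx calls interleaved with the progress entries.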
[ 1984.225742] env[62684]: DEBUG nova.network.neutron [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Updating instance_info_cache with network_info: [{"id": "3fcb3920-5e10-45e2-865d-cc9b89a1e335", "address": "fa:16:3e:2f:71:d6", "network": {"id": "bd253713-4e81-4c94-9689-22b81e7f51b6", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-307001665-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd812751722143fabedfa986a2d98b59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3fcb3920-5e", "ovs_interfaceid": "3fcb3920-5e10-45e2-865d-cc9b89a1e335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1984.408738] env[62684]: DEBUG oslo_vmware.api [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2053011, 'name': PowerOffVM_Task, 'duration_secs': 0.209386} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1984.409045] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1984.409228] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1984.409504] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f47c4475-049b-4e74-8b76-a11482416430 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.504462] env[62684]: DEBUG nova.compute.utils [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1984.506039] env[62684]: DEBUG nova.compute.manager [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1984.506214] env[62684]: DEBUG nova.network.neutron [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1984.520398] env[62684]: DEBUG oslo_vmware.api [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053012, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.506933} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1984.520657] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 0156d807-1ab4-482f-91d1-172bf32bf23c/0156d807-1ab4-482f-91d1-172bf32bf23c.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1984.520922] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1984.521174] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-65488588-07a2-4864-8e95-5c636ecd3d75 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.527941] env[62684]: DEBUG oslo_vmware.api [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 1984.527941] env[62684]: value = "task-2053014" [ 1984.527941] env[62684]: _type = "Task" [ 1984.527941] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.535186] env[62684]: DEBUG oslo_vmware.api [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053014, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.587350] env[62684]: DEBUG nova.policy [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0c9327f7394249948899bf76e1837d36', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7855def9d0aa49abb7003ee504b9ccaf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1984.652301] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1984.652651] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1984.652738] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Deleting the datastore file [datastore1] 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1984.653037] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c22c892b-2c60-46ab-a447-33d94701e156 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.661238] env[62684]: DEBUG oslo_vmware.api [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Waiting for the task: (returnval){ [ 1984.661238] env[62684]: value = "task-2053015" [ 1984.661238] env[62684]: _type = "Task" [ 1984.661238] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.669531] env[62684]: DEBUG oslo_vmware.api [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2053015, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.672059] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c4fa30f0-3d66-4e4f-9092-8451e636a4bf tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "57537508-06e7-43a4-95c5-c4399b8bf93f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.626s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1984.728872] env[62684]: DEBUG oslo_concurrency.lockutils [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Releasing lock "refresh_cache-2baabe7a-ed33-4cef-9acc-a7b804610b0a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1984.729173] env[62684]: DEBUG nova.compute.manager [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Instance network_info: |[{"id": "3fcb3920-5e10-45e2-865d-cc9b89a1e335", "address": "fa:16:3e:2f:71:d6", "network": {"id": "bd253713-4e81-4c94-9689-22b81e7f51b6", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-307001665-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd812751722143fabedfa986a2d98b59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3fcb3920-5e", "ovs_interfaceid": "3fcb3920-5e10-45e2-865d-cc9b89a1e335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1984.729629] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:71:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4055505f-97ab-400b-969c-43d99b38fd48', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3fcb3920-5e10-45e2-865d-cc9b89a1e335', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1984.738694] env[62684]: DEBUG oslo.service.loopingcall [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1984.738968] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1984.739291] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fa72d47b-551e-480c-aaaa-b2a1738cb929 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.761544] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1984.761544] env[62684]: value = "task-2053016" [ 1984.761544] env[62684]: _type = "Task" [ 1984.761544] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.769500] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053016, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.926453] env[62684]: DEBUG nova.compute.manager [req-dac33d9f-a0e9-49d2-96fd-e74ab870b1a7 req-6b25c51d-8a5e-4afe-a214-e3ca3ea81571 service nova] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Received event network-changed-0a96e2ce-2335-44e2-940d-26d3afbafa3a {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1984.926731] env[62684]: DEBUG nova.compute.manager [req-dac33d9f-a0e9-49d2-96fd-e74ab870b1a7 req-6b25c51d-8a5e-4afe-a214-e3ca3ea81571 service nova] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Refreshing instance network info cache due to event network-changed-0a96e2ce-2335-44e2-940d-26d3afbafa3a. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1984.926960] env[62684]: DEBUG oslo_concurrency.lockutils [req-dac33d9f-a0e9-49d2-96fd-e74ab870b1a7 req-6b25c51d-8a5e-4afe-a214-e3ca3ea81571 service nova] Acquiring lock "refresh_cache-57537508-06e7-43a4-95c5-c4399b8bf93f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1984.927257] env[62684]: DEBUG oslo_concurrency.lockutils [req-dac33d9f-a0e9-49d2-96fd-e74ab870b1a7 req-6b25c51d-8a5e-4afe-a214-e3ca3ea81571 service nova] Acquired lock "refresh_cache-57537508-06e7-43a4-95c5-c4399b8bf93f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1984.927506] env[62684]: DEBUG nova.network.neutron [req-dac33d9f-a0e9-49d2-96fd-e74ab870b1a7 req-6b25c51d-8a5e-4afe-a214-e3ca3ea81571 service nova] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Refreshing network info cache for port 0a96e2ce-2335-44e2-940d-26d3afbafa3a {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1984.990126] env[62684]: DEBUG nova.network.neutron [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Successfully created port: e41f6f98-c890-458a-b130-50e41463c0e4 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1985.007218] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d93e4c78-b13f-4d6b-9793-944d477b1faf tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1985.009193] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.947s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1985.009692] env[62684]: DEBUG nova.objects.instance [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lazy-loading 'resources' on Instance uuid df93c57e-716c-4c73-b551-9079a523ea0b {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1985.015710] env[62684]: DEBUG nova.compute.manager [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1985.046023] env[62684]: DEBUG oslo_vmware.api [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053014, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069333} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.046023] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1985.046023] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0aa59a7-502f-4f7f-a389-1c542ab6e896 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.078150] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 0156d807-1ab4-482f-91d1-172bf32bf23c/0156d807-1ab4-482f-91d1-172bf32bf23c.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1985.078150] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ceea99b-db23-4312-980f-99faed3257f6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.102016] env[62684]: DEBUG oslo_vmware.api [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 1985.102016] env[62684]: value = "task-2053017" [ 1985.102016] env[62684]: _type = "Task" [ 1985.102016] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1985.111360] env[62684]: DEBUG oslo_vmware.api [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053017, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.172513] env[62684]: DEBUG oslo_vmware.api [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2053015, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146676} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.172850] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1985.173064] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1985.173289] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1985.173579] env[62684]: INFO nova.compute.manager [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Took 1.29 seconds to destroy the instance on the hypervisor. [ 1985.173826] env[62684]: DEBUG oslo.service.loopingcall [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1985.174095] env[62684]: DEBUG nova.compute.manager [-] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1985.174914] env[62684]: DEBUG nova.network.neutron [-] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1985.271693] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053016, 'name': CreateVM_Task, 'duration_secs': 0.421019} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.271863] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1985.273124] env[62684]: DEBUG oslo_concurrency.lockutils [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1985.273323] env[62684]: DEBUG oslo_concurrency.lockutils [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1985.273667] env[62684]: DEBUG oslo_concurrency.lockutils [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1985.274047] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8584a02-6086-4935-944c-4170719aad9d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.278899] env[62684]: DEBUG oslo_vmware.api [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1985.278899] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52df06c3-9b9e-bb96-7cd9-f4ff56fcdaed" [ 1985.278899] env[62684]: _type = "Task" [ 1985.278899] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1985.291427] env[62684]: DEBUG oslo_vmware.api [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52df06c3-9b9e-bb96-7cd9-f4ff56fcdaed, 'name': SearchDatastore_Task, 'duration_secs': 0.010025} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.291475] env[62684]: DEBUG oslo_concurrency.lockutils [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1985.291755] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1985.291992] env[62684]: DEBUG oslo_concurrency.lockutils [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1985.292154] env[62684]: DEBUG oslo_concurrency.lockutils [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1985.292332] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1985.292577] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a5dc93c-5d68-4aab-97a0-1d64f377617d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.304706] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1985.304892] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1985.305969] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ea36f98-b764-46ad-a4e2-13aed99261e7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.311604] env[62684]: DEBUG oslo_vmware.api [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1985.311604] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ca534f-4012-7f4a-78b1-f5b5a7a77f12" [ 1985.311604] env[62684]: _type = "Task" [ 1985.311604] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1985.320294] env[62684]: DEBUG oslo_vmware.api [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ca534f-4012-7f4a-78b1-f5b5a7a77f12, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.615998] env[62684]: DEBUG oslo_vmware.api [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053017, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.632859] env[62684]: DEBUG nova.compute.manager [req-64486b79-1657-44e3-8df9-9f6ae7b70d5e req-58bb8761-ee87-4ad2-9982-f06b33377a2d service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Received event network-vif-deleted-e42d7614-a61b-4dd8-bfda-e086b3dc3317 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1985.633041] env[62684]: INFO nova.compute.manager [req-64486b79-1657-44e3-8df9-9f6ae7b70d5e req-58bb8761-ee87-4ad2-9982-f06b33377a2d service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Neutron deleted interface e42d7614-a61b-4dd8-bfda-e086b3dc3317; detaching it from the instance and deleting it from the info cache [ 1985.633586] env[62684]: DEBUG nova.network.neutron [req-64486b79-1657-44e3-8df9-9f6ae7b70d5e req-58bb8761-ee87-4ad2-9982-f06b33377a2d service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Updating instance_info_cache with network_info: [{"id": "4ee262da-da6d-457d-a523-6d14746195e0", "address": "fa:16:3e:aa:35:57", "network": {"id": "08b060f5-b54c-44fb-b154-44f39707094d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1693424751", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.173", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "398aed99e10d457e9cadda3239b27831", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 
705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ee262da-da", "ovs_interfaceid": "4ee262da-da6d-457d-a523-6d14746195e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7f34208d-7594-4a0b-8fef-8e970496dc46", "address": "fa:16:3e:47:fc:3a", "network": {"id": "36053f3d-f5ae-45b1-8837-9cf3452e81bd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-568683507", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.232", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "398aed99e10d457e9cadda3239b27831", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": "nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f34208d-75", "ovs_interfaceid": "7f34208d-7594-4a0b-8fef-8e970496dc46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1985.823823] env[62684]: DEBUG oslo_vmware.api [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ca534f-4012-7f4a-78b1-f5b5a7a77f12, 'name': SearchDatastore_Task, 'duration_secs': 0.011246} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.824680] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5ace45e-4bde-4f94-9c25-548c4a530552 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.831696] env[62684]: DEBUG oslo_vmware.api [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1985.831696] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52350ecf-3f82-2406-fac6-e60ec615d126" [ 1985.831696] env[62684]: _type = "Task" [ 1985.831696] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1985.842833] env[62684]: DEBUG oslo_vmware.api [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52350ecf-3f82-2406-fac6-e60ec615d126, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.975513] env[62684]: DEBUG nova.network.neutron [req-dac33d9f-a0e9-49d2-96fd-e74ab870b1a7 req-6b25c51d-8a5e-4afe-a214-e3ca3ea81571 service nova] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Updated VIF entry in instance network info cache for port 0a96e2ce-2335-44e2-940d-26d3afbafa3a. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1985.976080] env[62684]: DEBUG nova.network.neutron [req-dac33d9f-a0e9-49d2-96fd-e74ab870b1a7 req-6b25c51d-8a5e-4afe-a214-e3ca3ea81571 service nova] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Updating instance_info_cache with network_info: [{"id": "0a96e2ce-2335-44e2-940d-26d3afbafa3a", "address": "fa:16:3e:6d:b8:02", "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-120070593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "263c101fcc5e493789b79dfd1ba97cc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a96e2ce-23", "ovs_interfaceid": "0a96e2ce-2335-44e2-940d-26d3afbafa3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1985.993523] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a66d9b9c-b183-4eaf-bad4-325d8e740f93 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.002796] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaba41e5-efd4-4e86-9c6b-3846a77f7148 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.039635] env[62684]: DEBUG nova.compute.manager [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1986.040287] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af309255-ecf4-49a9-b9c6-2ead5418e4eb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.049292] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5eeaf96-1228-4a60-be49-44b291f0b664 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.067912] env[62684]: DEBUG nova.compute.provider_tree [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1986.074064] env[62684]: DEBUG nova.virt.hardware [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='76af9cfb7e26c98f3ae1920b4bb8d10a',container_format='bare',created_at=2025-01-10T07:49:28Z,direct_url=,disk_format='vmdk',id=0a70b1eb-e88e-4218-abd5-edf1113ecf05,min_disk=1,min_ram=0,name='tempest-test-snap-2088557181',owner='7855def9d0aa49abb7003ee504b9ccaf',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2025-01-10T07:49:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1986.074064] env[62684]: DEBUG nova.virt.hardware [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1986.074064] env[62684]: DEBUG nova.virt.hardware [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1986.074824] env[62684]: DEBUG nova.virt.hardware [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1986.075019] env[62684]: DEBUG nova.virt.hardware [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1986.075189] env[62684]: DEBUG nova.virt.hardware [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1986.075416] env[62684]: DEBUG nova.virt.hardware [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1986.075590] env[62684]: DEBUG nova.virt.hardware [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1986.075766] env[62684]: DEBUG nova.virt.hardware [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1986.075944] env[62684]: DEBUG nova.virt.hardware [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1986.076266] env[62684]: DEBUG nova.virt.hardware [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1986.077108] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daaac406-d9cf-409c-946b-70e8a614c3e8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.085496] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe76d506-5aed-4dd7-b986-10eef73b2e17 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.112868] env[62684]: DEBUG oslo_vmware.api [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053017, 'name': ReconfigVM_Task, 'duration_secs': 0.767306} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1986.113154] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 0156d807-1ab4-482f-91d1-172bf32bf23c/0156d807-1ab4-482f-91d1-172bf32bf23c.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1986.113808] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b9fcbc21-59c1-4373-8b25-b8e5c23a7733 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.121260] env[62684]: DEBUG oslo_vmware.api [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 1986.121260] env[62684]: value = "task-2053018" [ 1986.121260] env[62684]: _type = "Task" [ 1986.121260] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1986.129879] env[62684]: DEBUG oslo_vmware.api [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053018, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.142651] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dcbb160c-e26e-460a-a0ab-d6b6eeeb6429 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.152252] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f13dcdec-9664-49a1-b172-824d59372e2e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.190019] env[62684]: DEBUG nova.compute.manager [req-64486b79-1657-44e3-8df9-9f6ae7b70d5e req-58bb8761-ee87-4ad2-9982-f06b33377a2d service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Detach interface failed, port_id=e42d7614-a61b-4dd8-bfda-e086b3dc3317, reason: Instance 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21 could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1986.341680] env[62684]: DEBUG oslo_vmware.api [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52350ecf-3f82-2406-fac6-e60ec615d126, 'name': SearchDatastore_Task, 'duration_secs': 0.010568} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1986.342042] env[62684]: DEBUG oslo_concurrency.lockutils [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1986.342305] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 2baabe7a-ed33-4cef-9acc-a7b804610b0a/2baabe7a-ed33-4cef-9acc-a7b804610b0a.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1986.342584] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-562e3279-e33e-46cd-8ab4-f604502d4e72 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.350281] env[62684]: DEBUG oslo_vmware.api [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1986.350281] env[62684]: value = "task-2053019" [ 1986.350281] env[62684]: _type = "Task" [ 1986.350281] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1986.358714] env[62684]: DEBUG oslo_vmware.api [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053019, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.478590] env[62684]: DEBUG oslo_concurrency.lockutils [req-dac33d9f-a0e9-49d2-96fd-e74ab870b1a7 req-6b25c51d-8a5e-4afe-a214-e3ca3ea81571 service nova] Releasing lock "refresh_cache-57537508-06e7-43a4-95c5-c4399b8bf93f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1986.515689] env[62684]: DEBUG nova.network.neutron [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Successfully updated port: e41f6f98-c890-458a-b130-50e41463c0e4 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1986.572684] env[62684]: DEBUG nova.scheduler.client.report [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1986.632970] env[62684]: DEBUG oslo_vmware.api [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053018, 'name': Rename_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.859990] env[62684]: DEBUG oslo_vmware.api [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053019, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.491374} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1986.860330] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 2baabe7a-ed33-4cef-9acc-a7b804610b0a/2baabe7a-ed33-4cef-9acc-a7b804610b0a.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1986.860482] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1986.860736] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3f4ddfcf-e008-4343-bebe-6a0cfe48877b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.862583] env[62684]: DEBUG nova.network.neutron [-] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1986.869140] env[62684]: DEBUG oslo_vmware.api [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1986.869140] env[62684]: value = "task-2053020" [ 1986.869140] env[62684]: _type = "Task" [ 1986.869140] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1986.877016] env[62684]: DEBUG oslo_vmware.api [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053020, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.958164] env[62684]: DEBUG nova.compute.manager [req-e5c2c2d3-a84b-4014-87d1-0998d223c5de req-9fd47071-34ef-465b-8b99-07920d83cd53 service nova] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Received event network-vif-plugged-e41f6f98-c890-458a-b130-50e41463c0e4 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1986.958420] env[62684]: DEBUG oslo_concurrency.lockutils [req-e5c2c2d3-a84b-4014-87d1-0998d223c5de req-9fd47071-34ef-465b-8b99-07920d83cd53 service nova] Acquiring lock "6faeae10-c0bd-4297-b992-c05511fedb21-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1986.958644] env[62684]: DEBUG oslo_concurrency.lockutils [req-e5c2c2d3-a84b-4014-87d1-0998d223c5de req-9fd47071-34ef-465b-8b99-07920d83cd53 service nova] Lock "6faeae10-c0bd-4297-b992-c05511fedb21-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.958822] env[62684]: DEBUG oslo_concurrency.lockutils [req-e5c2c2d3-a84b-4014-87d1-0998d223c5de req-9fd47071-34ef-465b-8b99-07920d83cd53 service nova] Lock "6faeae10-c0bd-4297-b992-c05511fedb21-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.959014] env[62684]: DEBUG nova.compute.manager [req-e5c2c2d3-a84b-4014-87d1-0998d223c5de req-9fd47071-34ef-465b-8b99-07920d83cd53 service nova] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] No waiting events found dispatching network-vif-plugged-e41f6f98-c890-458a-b130-50e41463c0e4 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1986.959225] env[62684]: WARNING nova.compute.manager [req-e5c2c2d3-a84b-4014-87d1-0998d223c5de req-9fd47071-34ef-465b-8b99-07920d83cd53 service nova] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Received unexpected event network-vif-plugged-e41f6f98-c890-458a-b130-50e41463c0e4 for instance with vm_state building and task_state spawning. [ 1986.959394] env[62684]: DEBUG nova.compute.manager [req-e5c2c2d3-a84b-4014-87d1-0998d223c5de req-9fd47071-34ef-465b-8b99-07920d83cd53 service nova] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Received event network-changed-e41f6f98-c890-458a-b130-50e41463c0e4 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1986.959738] env[62684]: DEBUG nova.compute.manager [req-e5c2c2d3-a84b-4014-87d1-0998d223c5de req-9fd47071-34ef-465b-8b99-07920d83cd53 service nova] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Refreshing instance network info cache due to event network-changed-e41f6f98-c890-458a-b130-50e41463c0e4. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1986.959970] env[62684]: DEBUG oslo_concurrency.lockutils [req-e5c2c2d3-a84b-4014-87d1-0998d223c5de req-9fd47071-34ef-465b-8b99-07920d83cd53 service nova] Acquiring lock "refresh_cache-6faeae10-c0bd-4297-b992-c05511fedb21" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1986.960142] env[62684]: DEBUG oslo_concurrency.lockutils [req-e5c2c2d3-a84b-4014-87d1-0998d223c5de req-9fd47071-34ef-465b-8b99-07920d83cd53 service nova] Acquired lock "refresh_cache-6faeae10-c0bd-4297-b992-c05511fedb21" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1986.960315] env[62684]: DEBUG nova.network.neutron [req-e5c2c2d3-a84b-4014-87d1-0998d223c5de req-9fd47071-34ef-465b-8b99-07920d83cd53 service nova] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Refreshing network info cache for port e41f6f98-c890-458a-b130-50e41463c0e4 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1987.018247] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "refresh_cache-6faeae10-c0bd-4297-b992-c05511fedb21" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1987.078813] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.069s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1987.081415] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.678s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1987.081662] env[62684]: DEBUG nova.objects.instance [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lazy-loading 'resources' on Instance uuid b009f710-1a94-4113-8feb-7cc5dd6a6519 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1987.101108] env[62684]: INFO nova.scheduler.client.report [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Deleted allocations for instance df93c57e-716c-4c73-b551-9079a523ea0b [ 1987.132791] env[62684]: DEBUG oslo_vmware.api [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053018, 'name': Rename_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.365561] env[62684]: INFO nova.compute.manager [-] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Took 2.19 seconds to deallocate network for instance. 
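For readers following the "Acquiring lock ... / Lock ... acquired / released" lines above (for example the image-cache and "compute_resources" locks), this is the oslo.concurrency lockutils pattern. A minimal sketch of that usage, assuming only the public lockutils.lock context manager and lockutils.synchronized decorator; the lock names are copied from the log, the function bodies are placeholders and not Nova's actual code:

    # Illustrative only: the usage pattern behind the Acquiring/acquired/released
    # lock lines above. Not Nova source code.
    from oslo_concurrency import lockutils

    # Context-manager form; the lock name is taken from the log, the body is a stub.
    def update_image_cache():
        with lockutils.lock("[datastore1] devstack-image-cache_base"):
            # critical section: only one worker touches the cache directory at a time
            pass

    # Decorator form, as used for coarse-grained locks such as "compute_resources".
    @lockutils.synchronized("compute_resources")
    def update_usage():
        # resource-tracker style bookkeeping would go here
        pass

    if __name__ == "__main__":
        update_image_cache()
        update_usage()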
[ 1987.379694] env[62684]: DEBUG oslo_vmware.api [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053020, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092161} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1987.379961] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1987.380727] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d1d7dca-6414-4e2d-ae5d-372cf44cf405 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.407166] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] 2baabe7a-ed33-4cef-9acc-a7b804610b0a/2baabe7a-ed33-4cef-9acc-a7b804610b0a.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1987.408179] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-757b4c6e-f474-44f9-97fc-496d724a4402 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.429068] env[62684]: DEBUG oslo_vmware.api [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1987.429068] env[62684]: value = "task-2053021" [ 1987.429068] env[62684]: _type = "Task" [ 1987.429068] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1987.436842] env[62684]: DEBUG oslo_vmware.api [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053021, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.497076] env[62684]: DEBUG nova.network.neutron [req-e5c2c2d3-a84b-4014-87d1-0998d223c5de req-9fd47071-34ef-465b-8b99-07920d83cd53 service nova] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1987.583657] env[62684]: DEBUG nova.network.neutron [req-e5c2c2d3-a84b-4014-87d1-0998d223c5de req-9fd47071-34ef-465b-8b99-07920d83cd53 service nova] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1987.610644] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4b5a2beb-7d61-4a57-8d93-21a2a7d36500 tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "df93c57e-716c-4c73-b551-9079a523ea0b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.530s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1987.633438] env[62684]: DEBUG oslo_vmware.api [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053018, 'name': Rename_Task, 'duration_secs': 1.080227} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1987.635859] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1987.636287] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-51ed9468-687a-4c57-84ee-eef80542792a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.642648] env[62684]: DEBUG oslo_vmware.api [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 1987.642648] env[62684]: value = "task-2053022" [ 1987.642648] env[62684]: _type = "Task" [ 1987.642648] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1987.652655] env[62684]: DEBUG oslo_vmware.api [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053022, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.670113] env[62684]: DEBUG nova.compute.manager [req-c1dffb0b-f813-4771-8e8a-c4b7eac8031b req-c888e920-ec01-414f-9251-258bf3d7e661 service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Received event network-vif-deleted-4ee262da-da6d-457d-a523-6d14746195e0 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1987.670113] env[62684]: DEBUG nova.compute.manager [req-c1dffb0b-f813-4771-8e8a-c4b7eac8031b req-c888e920-ec01-414f-9251-258bf3d7e661 service nova] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Received event network-vif-deleted-7f34208d-7594-4a0b-8fef-8e970496dc46 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1987.875701] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1987.934371] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a88795eb-9cc7-48ce-a248-83c9b5928625 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.941958] env[62684]: DEBUG oslo_vmware.api [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053021, 'name': ReconfigVM_Task, 'duration_secs': 0.302412} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1987.943808] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Reconfigured VM instance instance-0000003f to attach disk [datastore1] 2baabe7a-ed33-4cef-9acc-a7b804610b0a/2baabe7a-ed33-4cef-9acc-a7b804610b0a.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1987.944505] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cfb880ee-3fd6-4911-8191-5cd9369bbb19 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.946665] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-185c8fd1-33c6-4698-877f-64eda32a9fc2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.978885] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa26efac-4abb-4a67-9876-f8c1b5120477 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.981888] env[62684]: DEBUG oslo_vmware.api [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1987.981888] env[62684]: value = "task-2053023" [ 1987.981888] env[62684]: _type = "Task" [ 1987.981888] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1987.988411] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3546347e-6864-46dc-bf2b-b00e7e86762a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.995415] env[62684]: DEBUG oslo_vmware.api [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053023, 'name': Rename_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.005752] env[62684]: DEBUG nova.compute.provider_tree [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1988.089251] env[62684]: DEBUG oslo_concurrency.lockutils [req-e5c2c2d3-a84b-4014-87d1-0998d223c5de req-9fd47071-34ef-465b-8b99-07920d83cd53 service nova] Releasing lock "refresh_cache-6faeae10-c0bd-4297-b992-c05511fedb21" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1988.089788] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquired lock "refresh_cache-6faeae10-c0bd-4297-b992-c05511fedb21" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1988.089901] env[62684]: DEBUG nova.network.neutron [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1988.153645] env[62684]: DEBUG oslo_vmware.api [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053022, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.492731] env[62684]: DEBUG oslo_vmware.api [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053023, 'name': Rename_Task, 'duration_secs': 0.138548} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1988.493039] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1988.493584] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8ade95c5-e25a-405e-84bf-57924270cfb4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.501570] env[62684]: DEBUG oslo_vmware.api [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 1988.501570] env[62684]: value = "task-2053024" [ 1988.501570] env[62684]: _type = "Task" [ 1988.501570] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.510330] env[62684]: DEBUG nova.scheduler.client.report [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1988.513586] env[62684]: DEBUG oslo_vmware.api [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053024, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.626884] env[62684]: DEBUG nova.network.neutron [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1988.653753] env[62684]: DEBUG oslo_vmware.api [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053022, 'name': PowerOnVM_Task, 'duration_secs': 0.610649} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1988.654046] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1988.654270] env[62684]: INFO nova.compute.manager [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Took 11.10 seconds to spawn the instance on the hypervisor. 
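The repeated "Task: {...} progress is N%" records followed by "completed successfully" above come from polling a vCenter task until it finishes (the wait_for_task/_poll_task calls in the log). A rough, generic sketch of that poll loop under assumed helpers; get_task_info() and the state names below are placeholders, not the real oslo.vmware API:

    # Generic poll-until-done sketch behind the "progress is N%" lines above.
    # get_task_info() and the state strings are placeholders, not oslo.vmware calls.
    import time

    def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
        """Poll a vCenter task reference until it succeeds or fails."""
        while True:
            info = get_task_info(task_ref)        # e.g. a property read on the Task object
            if info.state == "success":
                return info.result                # logged as "completed successfully"
            if info.state == "error":
                raise RuntimeError(info.error)    # surfaced as a task failure
            # Still queued/running: log progress and try again, which is what
            # produces the repeated "progress is N%" records.
            print(f"Task {task_ref}: progress is {getattr(info, 'progress', 0)}%")
            time.sleep(poll_interval)

    if __name__ == "__main__":
        # Toy stand-in for vCenter: the task finishes after three polls.
        class _Info:
            def __init__(self, state, progress=0, result=None, error=None):
                self.state, self.progress, self.result, self.error = state, progress, result, error

        states = iter([_Info("running", 14), _Info("running", 89), _Info("success", 100, result="ok")])
        print(wait_for_task(lambda ref: next(states), "task-2053018", poll_interval=0))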
[ 1988.654522] env[62684]: DEBUG nova.compute.manager [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1988.655251] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd23b89f-2d79-44be-8356-2b016d77d5f1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.773707] env[62684]: DEBUG nova.network.neutron [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Updating instance_info_cache with network_info: [{"id": "e41f6f98-c890-458a-b130-50e41463c0e4", "address": "fa:16:3e:69:1c:aa", "network": {"id": "2fa98fa4-ff7c-44e6-add0-693f55fd4b03", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2019954029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7855def9d0aa49abb7003ee504b9ccaf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape41f6f98-c8", "ovs_interfaceid": "e41f6f98-c890-458a-b130-50e41463c0e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1989.011220] env[62684]: DEBUG oslo_vmware.api [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053024, 'name': PowerOnVM_Task, 'duration_secs': 0.48738} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.011510] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1989.011697] env[62684]: INFO nova.compute.manager [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Took 8.89 seconds to spawn the instance on the hypervisor. 
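The long network_info blobs in the instance_info_cache updates above are lists of VIF dictionaries. To make the nesting explicit, here is a trimmed copy of the entry for port e41f6f98-c890-458a-b130-50e41463c0e4, using only values that appear in the log; the final print pulls out the fields the vmwareapi driver later assembles into its "Instance VIF info" record (MAC address, opaque-network id, interface id):

    # Trimmed copy of one VIF entry from the cache update above.
    vif = {
        "id": "e41f6f98-c890-458a-b130-50e41463c0e4",
        "address": "fa:16:3e:69:1c:aa",
        "devname": "tape41f6f98-c8",
        "type": "ovs",
        "network": {
            "id": "2fa98fa4-ff7c-44e6-add0-693f55fd4b03",
            "bridge": "br-int",
            "label": "tempest-ImagesTestJSON-2019954029-network",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4}],
            }],
        },
        "details": {"nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9",
                    "segmentation_id": 809},
    }

    print(vif["address"], vif["details"]["nsx-logical-switch-id"], vif["id"])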
[ 1989.011881] env[62684]: DEBUG nova.compute.manager [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1989.012660] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b670dccd-c1ec-41ba-8a12-e2e5aaaed3ef {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.016228] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.934s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1989.018333] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.531s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1989.019838] env[62684]: INFO nova.compute.claims [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1989.037282] env[62684]: INFO nova.scheduler.client.report [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Deleted allocations for instance b009f710-1a94-4113-8feb-7cc5dd6a6519 [ 1989.172031] env[62684]: INFO nova.compute.manager [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Took 44.14 seconds to build instance. 
[ 1989.276949] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Releasing lock "refresh_cache-6faeae10-c0bd-4297-b992-c05511fedb21" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1989.277268] env[62684]: DEBUG nova.compute.manager [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Instance network_info: |[{"id": "e41f6f98-c890-458a-b130-50e41463c0e4", "address": "fa:16:3e:69:1c:aa", "network": {"id": "2fa98fa4-ff7c-44e6-add0-693f55fd4b03", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2019954029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7855def9d0aa49abb7003ee504b9ccaf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape41f6f98-c8", "ovs_interfaceid": "e41f6f98-c890-458a-b130-50e41463c0e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1989.277754] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:1c:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e41f6f98-c890-458a-b130-50e41463c0e4', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1989.285108] env[62684]: DEBUG oslo.service.loopingcall [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1989.285379] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1989.285701] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ccfe587a-4193-451a-ad38-173934a084e2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.307423] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1989.307423] env[62684]: value = "task-2053025" [ 1989.307423] env[62684]: _type = "Task" [ 1989.307423] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.319217] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053025, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.545199] env[62684]: INFO nova.compute.manager [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Took 43.06 seconds to build instance. [ 1989.550971] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dd10f84f-2d9f-4769-84c7-47e10fb7e40a tempest-MultipleCreateTestJSON-2097349723 tempest-MultipleCreateTestJSON-2097349723-project-member] Lock "b009f710-1a94-4113-8feb-7cc5dd6a6519" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.546s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1989.675800] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0a3899d-f9d2-44e0-a313-42415fd6ed19 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "0156d807-1ab4-482f-91d1-172bf32bf23c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.104s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1989.821478] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053025, 'name': CreateVM_Task, 'duration_secs': 0.386783} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.821478] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1989.821478] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/0a70b1eb-e88e-4218-abd5-edf1113ecf05" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1989.821478] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquired lock "[datastore1] devstack-image-cache_base/0a70b1eb-e88e-4218-abd5-edf1113ecf05" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1989.821478] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/0a70b1eb-e88e-4218-abd5-edf1113ecf05" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1989.821478] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81396bd9-f3c8-486e-aa19-97a0a226a71c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.828960] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1989.828960] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52aa9013-c959-f097-4a8b-2a8fd50b68ac" [ 1989.828960] env[62684]: _type = "Task" [ 1989.828960] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.838895] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52aa9013-c959-f097-4a8b-2a8fd50b68ac, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.050764] env[62684]: DEBUG oslo_concurrency.lockutils [None req-62946359-c0dd-4b48-8a14-0684b38ed51d tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "2baabe7a-ed33-4cef-9acc-a7b804610b0a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.189s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1990.331821] env[62684]: DEBUG nova.compute.manager [req-69bde73a-4b77-45bd-8100-adcf548a1591 req-c969a55e-e2bf-401e-b802-81630afdf7b6 service nova] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Received event network-changed-f5c06971-b96a-4fa0-858e-5e47100e2e68 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1990.332040] env[62684]: DEBUG nova.compute.manager [req-69bde73a-4b77-45bd-8100-adcf548a1591 req-c969a55e-e2bf-401e-b802-81630afdf7b6 service nova] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Refreshing instance network info cache due to event network-changed-f5c06971-b96a-4fa0-858e-5e47100e2e68. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1990.332308] env[62684]: DEBUG oslo_concurrency.lockutils [req-69bde73a-4b77-45bd-8100-adcf548a1591 req-c969a55e-e2bf-401e-b802-81630afdf7b6 service nova] Acquiring lock "refresh_cache-0156d807-1ab4-482f-91d1-172bf32bf23c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1990.332483] env[62684]: DEBUG oslo_concurrency.lockutils [req-69bde73a-4b77-45bd-8100-adcf548a1591 req-c969a55e-e2bf-401e-b802-81630afdf7b6 service nova] Acquired lock "refresh_cache-0156d807-1ab4-482f-91d1-172bf32bf23c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1990.332657] env[62684]: DEBUG nova.network.neutron [req-69bde73a-4b77-45bd-8100-adcf548a1591 req-c969a55e-e2bf-401e-b802-81630afdf7b6 service nova] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Refreshing network info cache for port f5c06971-b96a-4fa0-858e-5e47100e2e68 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1990.352329] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Releasing lock "[datastore1] devstack-image-cache_base/0a70b1eb-e88e-4218-abd5-edf1113ecf05" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1990.352688] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Processing image 0a70b1eb-e88e-4218-abd5-edf1113ecf05 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1990.352999] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/0a70b1eb-e88e-4218-abd5-edf1113ecf05/0a70b1eb-e88e-4218-abd5-edf1113ecf05.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} 
[ 1990.353236] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquired lock "[datastore1] devstack-image-cache_base/0a70b1eb-e88e-4218-abd5-edf1113ecf05/0a70b1eb-e88e-4218-abd5-edf1113ecf05.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1990.353501] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1990.353820] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d6d8c776-791c-40a1-bfd1-b1b73cc1ca3c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.372332] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1990.372578] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1990.373356] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e6aae72-0515-4509-855f-efb1a5e0412a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.378914] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1990.378914] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5204663f-f43b-b9ee-b520-9f8a402bfbb6" [ 1990.378914] env[62684]: _type = "Task" [ 1990.378914] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.389235] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5204663f-f43b-b9ee-b520-9f8a402bfbb6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.473808] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17393dac-8098-4571-bdf0-205024912da0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.482781] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbbbb43c-5870-4932-8f76-4301f141d0e1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.518410] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a29535-d367-478b-9284-9fc25558ce91 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.526999] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e8cee9-e33b-4da1-8edb-872dbfb4439c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.541182] env[62684]: DEBUG nova.compute.provider_tree [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1990.895375] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Preparing fetch location {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1990.895375] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Fetch image to [datastore1] OSTACK_IMG_c115d546-c3c2-419a-8ad2-e1c293d7274a/OSTACK_IMG_c115d546-c3c2-419a-8ad2-e1c293d7274a.vmdk {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1990.895375] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Downloading stream optimized image 0a70b1eb-e88e-4218-abd5-edf1113ecf05 to [datastore1] OSTACK_IMG_c115d546-c3c2-419a-8ad2-e1c293d7274a/OSTACK_IMG_c115d546-c3c2-419a-8ad2-e1c293d7274a.vmdk on the data store datastore1 as vApp {{(pid=62684) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1990.895535] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Downloading image file data 0a70b1eb-e88e-4218-abd5-edf1113ecf05 to the ESX as VM named 'OSTACK_IMG_c115d546-c3c2-419a-8ad2-e1c293d7274a' {{(pid=62684) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1991.001029] env[62684]: DEBUG oslo_vmware.rw_handles [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 
tempest-ImagesTestJSON-303125295-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1991.001029] env[62684]: value = "resgroup-9" [ 1991.001029] env[62684]: _type = "ResourcePool" [ 1991.001029] env[62684]: }. {{(pid=62684) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1991.001367] env[62684]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-5161e9be-2ec8-462e-935d-1b512c27f01a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.034408] env[62684]: DEBUG oslo_vmware.rw_handles [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lease: (returnval){ [ 1991.034408] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52635427-4f80-89a6-8437-857f30ab845c" [ 1991.034408] env[62684]: _type = "HttpNfcLease" [ 1991.034408] env[62684]: } obtained for vApp import into resource pool (val){ [ 1991.034408] env[62684]: value = "resgroup-9" [ 1991.034408] env[62684]: _type = "ResourcePool" [ 1991.034408] env[62684]: }. {{(pid=62684) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1991.034780] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the lease: (returnval){ [ 1991.034780] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52635427-4f80-89a6-8437-857f30ab845c" [ 1991.034780] env[62684]: _type = "HttpNfcLease" [ 1991.034780] env[62684]: } to be ready. {{(pid=62684) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1991.042315] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1991.042315] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52635427-4f80-89a6-8437-857f30ab845c" [ 1991.042315] env[62684]: _type = "HttpNfcLease" [ 1991.042315] env[62684]: } is initializing. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1991.044826] env[62684]: DEBUG nova.scheduler.client.report [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1991.187233] env[62684]: DEBUG nova.network.neutron [req-69bde73a-4b77-45bd-8100-adcf548a1591 req-c969a55e-e2bf-401e-b802-81630afdf7b6 service nova] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Updated VIF entry in instance network info cache for port f5c06971-b96a-4fa0-858e-5e47100e2e68. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1991.187233] env[62684]: DEBUG nova.network.neutron [req-69bde73a-4b77-45bd-8100-adcf548a1591 req-c969a55e-e2bf-401e-b802-81630afdf7b6 service nova] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Updating instance_info_cache with network_info: [{"id": "f5c06971-b96a-4fa0-858e-5e47100e2e68", "address": "fa:16:3e:9e:fc:9d", "network": {"id": "e177c6d0-ddd5-4029-94af-c8f1b937dd9f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1344612161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27d04006afc747e19ad87238bfdbaad1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5c06971-b9", "ovs_interfaceid": "f5c06971-b96a-4fa0-858e-5e47100e2e68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1991.542721] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1991.542721] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52635427-4f80-89a6-8437-857f30ab845c" [ 1991.542721] env[62684]: _type = "HttpNfcLease" [ 1991.542721] env[62684]: } is ready. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1991.543069] env[62684]: DEBUG oslo_vmware.rw_handles [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1991.543069] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52635427-4f80-89a6-8437-857f30ab845c" [ 1991.543069] env[62684]: _type = "HttpNfcLease" [ 1991.543069] env[62684]: }. {{(pid=62684) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1991.543895] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37804919-8e53-48dc-b371-9abc534f3707 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.551542] env[62684]: DEBUG oslo_vmware.rw_handles [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52477764-6371-91ff-e724-95d69ed6578c/disk-0.vmdk from lease info. 
{{(pid=62684) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1991.551747] env[62684]: DEBUG oslo_vmware.rw_handles [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52477764-6371-91ff-e724-95d69ed6578c/disk-0.vmdk. {{(pid=62684) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1991.553673] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.535s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1991.554191] env[62684]: DEBUG nova.compute.manager [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1991.559504] env[62684]: DEBUG oslo_concurrency.lockutils [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.380s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1991.559504] env[62684]: DEBUG nova.objects.instance [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lazy-loading 'resources' on Instance uuid e3dd1bc0-f292-4ac7-a8db-324887a18411 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1991.633021] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b85fbccf-4676-4705-8c70-f00758d2e476 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.694425] env[62684]: DEBUG oslo_concurrency.lockutils [req-69bde73a-4b77-45bd-8100-adcf548a1591 req-c969a55e-e2bf-401e-b802-81630afdf7b6 service nova] Releasing lock "refresh_cache-0156d807-1ab4-482f-91d1-172bf32bf23c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1992.036997] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc708405-bb2e-4319-8535-c5f0c0ac867b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.052156] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1381671f-0d9a-441f-81d8-b8f76d267ee3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.098576] env[62684]: DEBUG nova.compute.utils [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 
tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1992.105456] env[62684]: DEBUG nova.compute.manager [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1992.105456] env[62684]: DEBUG nova.network.neutron [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1992.108864] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a60608-5cce-412a-a790-90c2f78e2ea2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.122568] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-800d7f84-5fa8-4e1c-a721-a7f0f414e127 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.137774] env[62684]: DEBUG nova.compute.provider_tree [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1992.167482] env[62684]: DEBUG nova.policy [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1824ab449ef743b2ad31816c563814b0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c64d2a551d44429ca54f58862c64fe9f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 1992.360454] env[62684]: DEBUG nova.compute.manager [req-d51a1f84-cd60-498d-b87b-a8dd3b1eb79d req-7c8e6fe7-4799-430b-a254-7160c4f5069c service nova] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Received event network-changed-3fcb3920-5e10-45e2-865d-cc9b89a1e335 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1992.360759] env[62684]: DEBUG nova.compute.manager [req-d51a1f84-cd60-498d-b87b-a8dd3b1eb79d req-7c8e6fe7-4799-430b-a254-7160c4f5069c service nova] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Refreshing instance network info cache due to event network-changed-3fcb3920-5e10-45e2-865d-cc9b89a1e335. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1992.360885] env[62684]: DEBUG oslo_concurrency.lockutils [req-d51a1f84-cd60-498d-b87b-a8dd3b1eb79d req-7c8e6fe7-4799-430b-a254-7160c4f5069c service nova] Acquiring lock "refresh_cache-2baabe7a-ed33-4cef-9acc-a7b804610b0a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1992.361513] env[62684]: DEBUG oslo_concurrency.lockutils [req-d51a1f84-cd60-498d-b87b-a8dd3b1eb79d req-7c8e6fe7-4799-430b-a254-7160c4f5069c service nova] Acquired lock "refresh_cache-2baabe7a-ed33-4cef-9acc-a7b804610b0a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1992.361734] env[62684]: DEBUG nova.network.neutron [req-d51a1f84-cd60-498d-b87b-a8dd3b1eb79d req-7c8e6fe7-4799-430b-a254-7160c4f5069c service nova] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Refreshing network info cache for port 3fcb3920-5e10-45e2-865d-cc9b89a1e335 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1992.449392] env[62684]: DEBUG nova.network.neutron [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Successfully created port: f3a330d0-ed4d-48e9-956c-1f6ee2137ea2 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1992.605730] env[62684]: DEBUG nova.compute.manager [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1992.646854] env[62684]: DEBUG nova.scheduler.client.report [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1992.955371] env[62684]: DEBUG oslo_vmware.rw_handles [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Completed reading data from the image iterator. {{(pid=62684) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1992.955620] env[62684]: DEBUG oslo_vmware.rw_handles [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52477764-6371-91ff-e724-95d69ed6578c/disk-0.vmdk. 
{{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1992.956641] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-847b0638-3b25-4086-ba00-efd9da2d47fa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.963065] env[62684]: DEBUG oslo_vmware.rw_handles [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52477764-6371-91ff-e724-95d69ed6578c/disk-0.vmdk is in state: ready. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1992.963249] env[62684]: DEBUG oslo_vmware.rw_handles [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52477764-6371-91ff-e724-95d69ed6578c/disk-0.vmdk. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1992.965596] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-0ac9ac02-c5b4-4fea-8755-74f04aa84ef3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.152860] env[62684]: DEBUG oslo_concurrency.lockutils [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.594s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1993.155179] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.974s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1993.155420] env[62684]: DEBUG nova.objects.instance [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Lazy-loading 'resources' on Instance uuid effc673a-103f-413b-88ac-6907ad1ee852 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1993.178533] env[62684]: INFO nova.scheduler.client.report [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Deleted allocations for instance e3dd1bc0-f292-4ac7-a8db-324887a18411 [ 1993.234018] env[62684]: DEBUG nova.network.neutron [req-d51a1f84-cd60-498d-b87b-a8dd3b1eb79d req-7c8e6fe7-4799-430b-a254-7160c4f5069c service nova] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Updated VIF entry in instance network info cache for port 3fcb3920-5e10-45e2-865d-cc9b89a1e335. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1993.234491] env[62684]: DEBUG nova.network.neutron [req-d51a1f84-cd60-498d-b87b-a8dd3b1eb79d req-7c8e6fe7-4799-430b-a254-7160c4f5069c service nova] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Updating instance_info_cache with network_info: [{"id": "3fcb3920-5e10-45e2-865d-cc9b89a1e335", "address": "fa:16:3e:2f:71:d6", "network": {"id": "bd253713-4e81-4c94-9689-22b81e7f51b6", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-307001665-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd812751722143fabedfa986a2d98b59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3fcb3920-5e", "ovs_interfaceid": "3fcb3920-5e10-45e2-865d-cc9b89a1e335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1993.355014] env[62684]: DEBUG oslo_vmware.rw_handles [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52477764-6371-91ff-e724-95d69ed6578c/disk-0.vmdk. 
{{(pid=62684) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1993.355280] env[62684]: INFO nova.virt.vmwareapi.images [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Downloaded image file data 0a70b1eb-e88e-4218-abd5-edf1113ecf05 [ 1993.356194] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a10543-f638-4084-92eb-b3c3744aec30 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.374707] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8062283b-4d5b-4700-bd8b-dfb9cfa63e0f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.506241] env[62684]: INFO nova.virt.vmwareapi.images [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] The imported VM was unregistered [ 1993.508569] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Caching image {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1993.508879] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Creating directory with path [datastore1] devstack-image-cache_base/0a70b1eb-e88e-4218-abd5-edf1113ecf05 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1993.509415] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b339cfd0-dc75-430c-ac29-35a92aafcd3c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.521584] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Created directory with path [datastore1] devstack-image-cache_base/0a70b1eb-e88e-4218-abd5-edf1113ecf05 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1993.521860] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_c115d546-c3c2-419a-8ad2-e1c293d7274a/OSTACK_IMG_c115d546-c3c2-419a-8ad2-e1c293d7274a.vmdk to [datastore1] devstack-image-cache_base/0a70b1eb-e88e-4218-abd5-edf1113ecf05/0a70b1eb-e88e-4218-abd5-edf1113ecf05.vmdk. 
{{(pid=62684) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1993.522034] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-bcdb7d25-f3fc-414e-9c32-639e32c3a9b8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.528918] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1993.528918] env[62684]: value = "task-2053028" [ 1993.528918] env[62684]: _type = "Task" [ 1993.528918] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1993.536901] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053028, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1993.615682] env[62684]: DEBUG nova.compute.manager [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1993.641917] env[62684]: DEBUG nova.virt.hardware [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1993.642222] env[62684]: DEBUG nova.virt.hardware [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1993.642414] env[62684]: DEBUG nova.virt.hardware [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1993.642640] env[62684]: DEBUG nova.virt.hardware [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Flavor pref 0:0:0 {{(pid=62684) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1993.642836] env[62684]: DEBUG nova.virt.hardware [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1993.642993] env[62684]: DEBUG nova.virt.hardware [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1993.643253] env[62684]: DEBUG nova.virt.hardware [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1993.643449] env[62684]: DEBUG nova.virt.hardware [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1993.643655] env[62684]: DEBUG nova.virt.hardware [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1993.643855] env[62684]: DEBUG nova.virt.hardware [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1993.644077] env[62684]: DEBUG nova.virt.hardware [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1993.645026] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf99e137-a340-44ba-96c9-579b2810d576 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.653753] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-788e0e8e-8724-4fbe-8544-1e8b72887b41 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.691450] env[62684]: DEBUG oslo_concurrency.lockutils [None req-062c4524-cc95-406e-a83d-db9432b1ca02 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "e3dd1bc0-f292-4ac7-a8db-324887a18411" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.525s {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1993.737659] env[62684]: DEBUG oslo_concurrency.lockutils [req-d51a1f84-cd60-498d-b87b-a8dd3b1eb79d req-7c8e6fe7-4799-430b-a254-7160c4f5069c service nova] Releasing lock "refresh_cache-2baabe7a-ed33-4cef-9acc-a7b804610b0a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1993.975021] env[62684]: DEBUG nova.network.neutron [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Successfully updated port: f3a330d0-ed4d-48e9-956c-1f6ee2137ea2 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1994.001948] env[62684]: DEBUG oslo_concurrency.lockutils [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "4a15d298-115f-4132-8be0-00e623fa21d8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1994.002615] env[62684]: DEBUG oslo_concurrency.lockutils [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "4a15d298-115f-4132-8be0-00e623fa21d8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1994.002615] env[62684]: DEBUG oslo_concurrency.lockutils [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "4a15d298-115f-4132-8be0-00e623fa21d8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1994.002732] env[62684]: DEBUG oslo_concurrency.lockutils [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "4a15d298-115f-4132-8be0-00e623fa21d8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1994.002812] env[62684]: DEBUG oslo_concurrency.lockutils [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "4a15d298-115f-4132-8be0-00e623fa21d8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1994.005339] env[62684]: INFO nova.compute.manager [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Terminating instance [ 1994.007696] env[62684]: DEBUG nova.compute.manager [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 
tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1994.007881] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1994.008755] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e40b500a-17de-447c-a572-e8b82c54f4da {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.023423] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1994.023729] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e88303e4-4c64-4cab-8ef5-00a94afc5575 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.034904] env[62684]: DEBUG oslo_vmware.api [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1994.034904] env[62684]: value = "task-2053029" [ 1994.034904] env[62684]: _type = "Task" [ 1994.034904] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1994.042054] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053028, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.046169] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c90d01-d9b0-4010-aea6-8f5641b78356 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.053301] env[62684]: DEBUG oslo_vmware.api [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2053029, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.057992] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b5fd531-c7d2-4916-bb2c-3bfc03881b7c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.090090] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59ab5b62-cc61-4400-8c01-6b32a15b5a45 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.098404] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-827052a8-a270-479a-a8f0-8157548454b7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.112945] env[62684]: DEBUG nova.compute.provider_tree [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1994.457169] env[62684]: DEBUG nova.compute.manager [req-7eb1300d-86e4-4b19-809d-d6ba0123f8a6 req-d6bf8769-b462-43d4-aa3d-0d59d97e82e9 service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Received event network-vif-plugged-f3a330d0-ed4d-48e9-956c-1f6ee2137ea2 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1994.457426] env[62684]: DEBUG oslo_concurrency.lockutils [req-7eb1300d-86e4-4b19-809d-d6ba0123f8a6 req-d6bf8769-b462-43d4-aa3d-0d59d97e82e9 service nova] Acquiring lock "548df581-073b-41d4-bcbe-df7342a2beca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1994.457675] env[62684]: DEBUG oslo_concurrency.lockutils [req-7eb1300d-86e4-4b19-809d-d6ba0123f8a6 req-d6bf8769-b462-43d4-aa3d-0d59d97e82e9 service nova] Lock "548df581-073b-41d4-bcbe-df7342a2beca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1994.457896] env[62684]: DEBUG oslo_concurrency.lockutils [req-7eb1300d-86e4-4b19-809d-d6ba0123f8a6 req-d6bf8769-b462-43d4-aa3d-0d59d97e82e9 service nova] Lock "548df581-073b-41d4-bcbe-df7342a2beca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1994.458098] env[62684]: DEBUG nova.compute.manager [req-7eb1300d-86e4-4b19-809d-d6ba0123f8a6 req-d6bf8769-b462-43d4-aa3d-0d59d97e82e9 service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] No waiting events found dispatching network-vif-plugged-f3a330d0-ed4d-48e9-956c-1f6ee2137ea2 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1994.458277] env[62684]: WARNING nova.compute.manager [req-7eb1300d-86e4-4b19-809d-d6ba0123f8a6 req-d6bf8769-b462-43d4-aa3d-0d59d97e82e9 service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Received unexpected event network-vif-plugged-f3a330d0-ed4d-48e9-956c-1f6ee2137ea2 for instance 
with vm_state building and task_state spawning. [ 1994.458444] env[62684]: DEBUG nova.compute.manager [req-7eb1300d-86e4-4b19-809d-d6ba0123f8a6 req-d6bf8769-b462-43d4-aa3d-0d59d97e82e9 service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Received event network-changed-f3a330d0-ed4d-48e9-956c-1f6ee2137ea2 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1994.458648] env[62684]: DEBUG nova.compute.manager [req-7eb1300d-86e4-4b19-809d-d6ba0123f8a6 req-d6bf8769-b462-43d4-aa3d-0d59d97e82e9 service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Refreshing instance network info cache due to event network-changed-f3a330d0-ed4d-48e9-956c-1f6ee2137ea2. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1994.458788] env[62684]: DEBUG oslo_concurrency.lockutils [req-7eb1300d-86e4-4b19-809d-d6ba0123f8a6 req-d6bf8769-b462-43d4-aa3d-0d59d97e82e9 service nova] Acquiring lock "refresh_cache-548df581-073b-41d4-bcbe-df7342a2beca" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1994.458941] env[62684]: DEBUG oslo_concurrency.lockutils [req-7eb1300d-86e4-4b19-809d-d6ba0123f8a6 req-d6bf8769-b462-43d4-aa3d-0d59d97e82e9 service nova] Acquired lock "refresh_cache-548df581-073b-41d4-bcbe-df7342a2beca" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1994.459120] env[62684]: DEBUG nova.network.neutron [req-7eb1300d-86e4-4b19-809d-d6ba0123f8a6 req-d6bf8769-b462-43d4-aa3d-0d59d97e82e9 service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Refreshing network info cache for port f3a330d0-ed4d-48e9-956c-1f6ee2137ea2 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1994.477627] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Acquiring lock "refresh_cache-548df581-073b-41d4-bcbe-df7342a2beca" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1994.540096] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053028, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.551050] env[62684]: DEBUG oslo_vmware.api [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2053029, 'name': PowerOffVM_Task, 'duration_secs': 0.271356} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1994.551361] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1994.551535] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1994.551810] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-de577b2f-b11e-45ed-99d8-9c1e860b2518 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.617069] env[62684]: DEBUG nova.scheduler.client.report [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1994.705057] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1994.705250] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1994.705299] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Deleting the datastore file [datastore2] 4a15d298-115f-4132-8be0-00e623fa21d8 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1994.705582] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-024a7f0b-39d9-4ecb-a548-0186affbf943 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.713884] env[62684]: DEBUG oslo_vmware.api [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 1994.713884] env[62684]: value = "task-2053031" [ 1994.713884] env[62684]: _type = "Task" [ 1994.713884] env[62684]: } to 
complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1994.725077] env[62684]: DEBUG oslo_vmware.api [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2053031, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.994401] env[62684]: DEBUG nova.network.neutron [req-7eb1300d-86e4-4b19-809d-d6ba0123f8a6 req-d6bf8769-b462-43d4-aa3d-0d59d97e82e9 service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1995.040211] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053028, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.095752] env[62684]: DEBUG nova.network.neutron [req-7eb1300d-86e4-4b19-809d-d6ba0123f8a6 req-d6bf8769-b462-43d4-aa3d-0d59d97e82e9 service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1995.123112] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.968s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1995.125728] env[62684]: DEBUG oslo_concurrency.lockutils [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.832s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1995.125860] env[62684]: DEBUG nova.objects.instance [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Lazy-loading 'resources' on Instance uuid a3c7943e-7528-41bc-9a20-1e2b57f832e3 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1995.147272] env[62684]: INFO nova.scheduler.client.report [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Deleted allocations for instance effc673a-103f-413b-88ac-6907ad1ee852 [ 1995.223959] env[62684]: DEBUG oslo_vmware.api [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2053031, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.296679} completed successfully. 
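[editor's note] The "Waiting for the task" / "progress is N%" / "completed successfully ... duration_secs" entries around this point all come from oslo.vmware's task-polling helpers (wait_for_task at api.py:397, _poll_task at api.py:434/444). A minimal sketch of that pattern follows; it is not part of the captured log, and the host, credentials and the 'vm-123' moref value are placeholders.

# editor's sketch only -- placeholder vCenter host/credentials and moref value.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc.example.test',        # vCenter host (placeholder)
    'user@vsphere.local',     # username (placeholder)
    'secret',                 # password (placeholder)
    10,                       # api_retry_count
    0.5)                      # task_poll_interval, seconds between polls

vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')  # placeholder moref

# Invoking a vSphere *_Task method returns a Task moref; wait_for_task()
# then polls it, which is what produces the "progress is N%" and
# "completed successfully" entries (with duration_secs) in the log above.
task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task_ref)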
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1995.224269] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1995.224495] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1995.224692] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1995.224889] env[62684]: INFO nova.compute.manager [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1995.225149] env[62684]: DEBUG oslo.service.loopingcall [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1995.225360] env[62684]: DEBUG nova.compute.manager [-] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1995.225458] env[62684]: DEBUG nova.network.neutron [-] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1995.542582] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053028, 'name': MoveVirtualDisk_Task} progress is 88%. 
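[editor's note] The "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return" entry above is logged by oslo.service's retry wrapper in loopingcall.py. The sketch below shows that wrapper in isolation; the decorated function and its parameters are illustrative stand-ins, not Nova's actual deallocation code.

# editor's sketch only -- flaky_operation and its retry parameters are
# illustrative, not taken from Nova.
from oslo_service import loopingcall


@loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                            max_sleep_time=10, exceptions=(IOError,))
def flaky_operation():
    # On IOError the call is retried with an increasing sleep; any other
    # exception, or exhausting max_retry_count, propagates to the caller.
    # The wrapper logs "Waiting for function ... to return" while it runs.
    return 'ok'


print(flaky_operation())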
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.598234] env[62684]: DEBUG oslo_concurrency.lockutils [req-7eb1300d-86e4-4b19-809d-d6ba0123f8a6 req-d6bf8769-b462-43d4-aa3d-0d59d97e82e9 service nova] Releasing lock "refresh_cache-548df581-073b-41d4-bcbe-df7342a2beca" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1995.598686] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Acquired lock "refresh_cache-548df581-073b-41d4-bcbe-df7342a2beca" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1995.598886] env[62684]: DEBUG nova.network.neutron [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1995.659744] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1654224f-ec70-4017-8ec2-cf8ca3fe2bac tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Lock "effc673a-103f-413b-88ac-6907ad1ee852" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.136s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1995.983594] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6589f8b4-70ec-4584-960f-db1bc75c9b28 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.991764] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b14966dc-d4bf-4f52-b2b5-60761cc70741 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.994827] env[62684]: DEBUG nova.network.neutron [-] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1996.025505] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9dc5421-05ef-470f-aca5-642eef8e42ae {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.038014] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05140360-e534-4885-bbc8-d2459624d48a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.054324] env[62684]: DEBUG nova.compute.provider_tree [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1996.059460] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 
tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053028, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.313471} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1996.060041] env[62684]: INFO nova.virt.vmwareapi.ds_util [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_c115d546-c3c2-419a-8ad2-e1c293d7274a/OSTACK_IMG_c115d546-c3c2-419a-8ad2-e1c293d7274a.vmdk to [datastore1] devstack-image-cache_base/0a70b1eb-e88e-4218-abd5-edf1113ecf05/0a70b1eb-e88e-4218-abd5-edf1113ecf05.vmdk. [ 1996.060272] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Cleaning up location [datastore1] OSTACK_IMG_c115d546-c3c2-419a-8ad2-e1c293d7274a {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1996.060467] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_c115d546-c3c2-419a-8ad2-e1c293d7274a {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1996.060749] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f9885f83-ae87-474a-a096-e9a4f7fe5673 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.068219] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1996.068219] env[62684]: value = "task-2053032" [ 1996.068219] env[62684]: _type = "Task" [ 1996.068219] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1996.076111] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053032, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.150995] env[62684]: DEBUG nova.network.neutron [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1996.293446] env[62684]: DEBUG nova.network.neutron [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Updating instance_info_cache with network_info: [{"id": "f3a330d0-ed4d-48e9-956c-1f6ee2137ea2", "address": "fa:16:3e:ba:1c:e2", "network": {"id": "944b4fdf-4610-4ecc-acbb-a4943f324b5d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1439448096-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c64d2a551d44429ca54f58862c64fe9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3a330d0-ed", "ovs_interfaceid": "f3a330d0-ed4d-48e9-956c-1f6ee2137ea2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1996.492012] env[62684]: DEBUG nova.compute.manager [req-a5729249-d0aa-49ac-bc69-42ecf28a6b3e req-f036262c-a8ab-411a-b2f1-cd385dcc31c3 service nova] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Received event network-vif-deleted-1b1935d6-812b-4914-a0c8-8cfc6f58c715 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1996.497271] env[62684]: INFO nova.compute.manager [-] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Took 1.27 seconds to deallocate network for instance. [ 1996.560869] env[62684]: DEBUG nova.scheduler.client.report [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1996.578378] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053032, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.03615} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1996.579291] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1996.579498] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Releasing lock "[datastore1] devstack-image-cache_base/0a70b1eb-e88e-4218-abd5-edf1113ecf05/0a70b1eb-e88e-4218-abd5-edf1113ecf05.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1996.579758] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/0a70b1eb-e88e-4218-abd5-edf1113ecf05/0a70b1eb-e88e-4218-abd5-edf1113ecf05.vmdk to [datastore1] 6faeae10-c0bd-4297-b992-c05511fedb21/6faeae10-c0bd-4297-b992-c05511fedb21.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1996.580025] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-76b5ebc9-eb19-464c-9524-3962f260e50e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.586391] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1996.586391] env[62684]: value = "task-2053033" [ 1996.586391] env[62684]: _type = "Task" [ 1996.586391] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1996.593944] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053033, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.796716] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Releasing lock "refresh_cache-548df581-073b-41d4-bcbe-df7342a2beca" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1996.797165] env[62684]: DEBUG nova.compute.manager [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Instance network_info: |[{"id": "f3a330d0-ed4d-48e9-956c-1f6ee2137ea2", "address": "fa:16:3e:ba:1c:e2", "network": {"id": "944b4fdf-4610-4ecc-acbb-a4943f324b5d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1439448096-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c64d2a551d44429ca54f58862c64fe9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3a330d0-ed", "ovs_interfaceid": "f3a330d0-ed4d-48e9-956c-1f6ee2137ea2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1996.797750] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ba:1c:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea00b53a-9c9b-4592-ab95-7e10473f338d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f3a330d0-ed4d-48e9-956c-1f6ee2137ea2', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1996.805917] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Creating folder: Project (c64d2a551d44429ca54f58862c64fe9f). Parent ref: group-v421118. 
{{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1996.806287] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bbb92b34-d6cb-4491-884c-b72e40ed0535 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.819077] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Created folder: Project (c64d2a551d44429ca54f58862c64fe9f) in parent group-v421118. [ 1996.819354] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Creating folder: Instances. Parent ref: group-v421303. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1996.819639] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8872af52-eced-492a-920d-f208f0e0617a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.829814] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Created folder: Instances in parent group-v421303. [ 1996.830103] env[62684]: DEBUG oslo.service.loopingcall [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1996.830332] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1996.830559] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6616a8e9-b1dd-4bcd-9eca-a1b228ae5f2a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.853121] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1996.853121] env[62684]: value = "task-2053036" [ 1996.853121] env[62684]: _type = "Task" [ 1996.853121] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1996.861462] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053036, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.003240] env[62684]: DEBUG oslo_concurrency.lockutils [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1997.066534] env[62684]: DEBUG oslo_concurrency.lockutils [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.941s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1997.068936] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.236s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1997.069223] env[62684]: DEBUG nova.objects.instance [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Lazy-loading 'resources' on Instance uuid 18a97088-fffa-4b77-8ab0-d24f6f84f516 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1997.092485] env[62684]: INFO nova.scheduler.client.report [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Deleted allocations for instance a3c7943e-7528-41bc-9a20-1e2b57f832e3 [ 1997.100694] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053033, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.160344] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Acquiring lock "fcc937e3-163d-432b-a131-a53c002e5e8d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1997.160589] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Lock "fcc937e3-163d-432b-a131-a53c002e5e8d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1997.364041] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053036, 'name': CreateVM_Task, 'duration_secs': 0.448777} completed successfully. 
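[editor's note] The 'Acquiring lock', 'acquired ... waited', and '"released" ... held' entries around this point (lockutils.py:310/313/331 and 402/407/421) are emitted by oslo.concurrency: the decorator form for names like "compute_resources" and the context-manager form for the "refresh_cache-<uuid>" locks. A minimal sketch, with illustrative lock names and empty critical sections:

# editor's sketch only -- lock names and critical sections are illustrative.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage():
    # Serialized per process; the wrapper logs how long it waited for the
    # semaphore and how long the function held it, as in the entries above.
    pass


def refresh_cache(instance_uuid):
    # Explicit context-manager form, as used for the refresh_cache-<uuid> locks.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass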
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1997.364274] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1997.365128] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1997.365170] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1997.365527] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1997.365833] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9823db3-98bd-4dd8-b957-4b6224852c80 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.370918] env[62684]: DEBUG oslo_vmware.api [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Waiting for the task: (returnval){ [ 1997.370918] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a0eee9-55ea-a5e9-34c3-54c2cd7197d2" [ 1997.370918] env[62684]: _type = "Task" [ 1997.370918] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1997.379778] env[62684]: DEBUG oslo_vmware.api [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a0eee9-55ea-a5e9-34c3-54c2cd7197d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.599555] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053033, 'name': CopyVirtualDisk_Task} progress is 43%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.606387] env[62684]: DEBUG oslo_concurrency.lockutils [None req-24b217e1-18bc-43ab-be88-c0745835e337 tempest-InstanceActionsV221TestJSON-905217457 tempest-InstanceActionsV221TestJSON-905217457-project-member] Lock "a3c7943e-7528-41bc-9a20-1e2b57f832e3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.942s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1997.667171] env[62684]: DEBUG nova.compute.manager [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1997.882580] env[62684]: DEBUG oslo_vmware.api [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a0eee9-55ea-a5e9-34c3-54c2cd7197d2, 'name': SearchDatastore_Task, 'duration_secs': 0.090158} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1997.882960] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1997.883196] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1997.883451] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1997.883607] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1997.883799] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1997.884117] 
env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-496ce54b-675c-4afc-9197-9ca3e2e65d7a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.901335] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1997.901335] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1997.904666] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7430a4d-071d-4188-8eee-7707b3d4f030 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.910420] env[62684]: DEBUG oslo_vmware.api [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Waiting for the task: (returnval){ [ 1997.910420] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52eb28bf-e1f6-ab05-e0ec-1b41be0c7d76" [ 1997.910420] env[62684]: _type = "Task" [ 1997.910420] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1997.918964] env[62684]: DEBUG oslo_vmware.api [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52eb28bf-e1f6-ab05-e0ec-1b41be0c7d76, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.976571] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cb42c56-e547-4c64-878c-eb3fe2df850d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.984361] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e180b29-cb2f-41b9-b5e0-d88eb6b6f4e7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.018268] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d166fa8-394c-408b-93e1-de1dbe7fa22f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.027945] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30893b13-73d0-4e42-ba1c-d39925c83957 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.043422] env[62684]: DEBUG nova.compute.provider_tree [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1998.098664] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053033, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.196665] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1998.428236] env[62684]: DEBUG oslo_vmware.api [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52eb28bf-e1f6-ab05-e0ec-1b41be0c7d76, 'name': SearchDatastore_Task, 'duration_secs': 0.085584} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.431198] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd1809b5-1cda-4f8c-a7bd-986d9dad2640 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.439270] env[62684]: DEBUG oslo_vmware.api [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Waiting for the task: (returnval){ [ 1998.439270] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c1b704-e75a-fbe5-3a9b-e789a6a414c6" [ 1998.439270] env[62684]: _type = "Task" [ 1998.439270] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1998.449838] env[62684]: DEBUG oslo_vmware.api [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c1b704-e75a-fbe5-3a9b-e789a6a414c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.550180] env[62684]: DEBUG nova.scheduler.client.report [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1998.598198] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053033, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.950383] env[62684]: DEBUG oslo_vmware.api [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c1b704-e75a-fbe5-3a9b-e789a6a414c6, 'name': SearchDatastore_Task, 'duration_secs': 0.091473} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.950659] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1998.950962] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 548df581-073b-41d4-bcbe-df7342a2beca/548df581-073b-41d4-bcbe-df7342a2beca.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1998.951253] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e4cb9146-9d42-4e79-a61a-e097ff2b1a57 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.958475] env[62684]: DEBUG oslo_vmware.api [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Waiting for the task: (returnval){ [ 1998.958475] env[62684]: value = "task-2053037" [ 1998.958475] env[62684]: _type = "Task" [ 1998.958475] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1998.967998] env[62684]: DEBUG oslo_vmware.api [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': task-2053037, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.055552] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.986s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1999.058041] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.229s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1999.058419] env[62684]: DEBUG nova.objects.instance [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lazy-loading 'pci_requests' on Instance uuid b4cd871a-30ea-4b7a-98ad-00b8676dc2cd {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1999.094458] env[62684]: INFO nova.scheduler.client.report [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Deleted allocations for instance 18a97088-fffa-4b77-8ab0-d24f6f84f516 [ 1999.099657] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053033, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.304804} completed successfully. 
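[editor's note] The "[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk" strings in the copy and delete entries above are datastore paths, for which oslo.vmware provides a small helper object. A sketch follows; the datastore and file names are placeholders, not values from this log.

# editor's sketch only -- datastore and file names below are placeholders.
from oslo_vmware.objects import datastore as ds_obj

path = ds_obj.DatastorePath('datastore1', 'devstack-image-cache_base',
                            'image-id', 'image-id.vmdk')
print(str(path))          # e.g. "[datastore1] devstack-image-cache_base/image-id/image-id.vmdk"

parsed = ds_obj.DatastorePath.parse('[datastore1] some-folder/disk.vmdk')
print(parsed.datastore)   # 'datastore1'
print(parsed.rel_path)    # 'some-folder/disk.vmdk'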
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1999.101150] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/0a70b1eb-e88e-4218-abd5-edf1113ecf05/0a70b1eb-e88e-4218-abd5-edf1113ecf05.vmdk to [datastore1] 6faeae10-c0bd-4297-b992-c05511fedb21/6faeae10-c0bd-4297-b992-c05511fedb21.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1999.102105] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-727422c1-092a-4b5d-9c96-3ab0f088d9e7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.131261] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 6faeae10-c0bd-4297-b992-c05511fedb21/6faeae10-c0bd-4297-b992-c05511fedb21.vmdk or device None with type streamOptimized {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1999.131729] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b90f4471-6871-4328-814b-7b1da8ceb9eb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.164083] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1999.164083] env[62684]: value = "task-2053038" [ 1999.164083] env[62684]: _type = "Task" [ 1999.164083] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1999.176419] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053038, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.475809] env[62684]: DEBUG oslo_vmware.api [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': task-2053037, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.463666} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1999.476383] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 548df581-073b-41d4-bcbe-df7342a2beca/548df581-073b-41d4-bcbe-df7342a2beca.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1999.479554] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1999.479554] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3128f064-6854-4bbe-b8f1-addd69c1d1bc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.485080] env[62684]: DEBUG oslo_vmware.api [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Waiting for the task: (returnval){ [ 1999.485080] env[62684]: value = "task-2053039" [ 1999.485080] env[62684]: _type = "Task" [ 1999.485080] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1999.493808] env[62684]: DEBUG oslo_vmware.api [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': task-2053039, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.562435] env[62684]: DEBUG nova.objects.instance [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lazy-loading 'numa_topology' on Instance uuid b4cd871a-30ea-4b7a-98ad-00b8676dc2cd {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1999.604826] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b4ecfac9-0ca5-4a13-950d-1b3e4d04c5fd tempest-ServersAaction247Test-57978732 tempest-ServersAaction247Test-57978732-project-member] Lock "18a97088-fffa-4b77-8ab0-d24f6f84f516" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.941s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1999.673643] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053038, 'name': ReconfigVM_Task, 'duration_secs': 0.362705} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1999.673939] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 6faeae10-c0bd-4297-b992-c05511fedb21/6faeae10-c0bd-4297-b992-c05511fedb21.vmdk or device None with type streamOptimized {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1999.674709] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-434cbdd6-1b8f-45f5-bf61-42de0f892826 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.681294] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 1999.681294] env[62684]: value = "task-2053040" [ 1999.681294] env[62684]: _type = "Task" [ 1999.681294] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1999.689900] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053040, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.995472] env[62684]: DEBUG oslo_vmware.api [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': task-2053039, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071772} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1999.995833] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1999.996909] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adfc8970-e782-4f4a-b81e-6482211d995e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.020379] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] 548df581-073b-41d4-bcbe-df7342a2beca/548df581-073b-41d4-bcbe-df7342a2beca.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2000.020994] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73c5e27c-0aec-4c20-a3ae-ca2790ee08e3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.044455] env[62684]: DEBUG oslo_vmware.api [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Waiting for the task: (returnval){ [ 2000.044455] env[62684]: value = "task-2053041" [ 2000.044455] env[62684]: _type = "Task" [ 2000.044455] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2000.052584] env[62684]: DEBUG oslo_vmware.api [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': task-2053041, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.065292] env[62684]: INFO nova.compute.claims [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2000.102434] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2000.102676] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2000.191944] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053040, 'name': Rename_Task, 'duration_secs': 0.135155} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2000.192364] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2000.192736] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-667dc6b1-2264-464d-9e7b-86c078d674c2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.198886] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 2000.198886] env[62684]: value = "task-2053042" [ 2000.198886] env[62684]: _type = "Task" [ 2000.198886] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2000.206191] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053042, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.558837] env[62684]: DEBUG oslo_vmware.api [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': task-2053041, 'name': ReconfigVM_Task, 'duration_secs': 0.340042} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2000.558837] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Reconfigured VM instance instance-00000041 to attach disk [datastore1] 548df581-073b-41d4-bcbe-df7342a2beca/548df581-073b-41d4-bcbe-df7342a2beca.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2000.558837] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-267001dc-f560-4237-b901-545698d0932e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.569036] env[62684]: DEBUG oslo_vmware.api [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Waiting for the task: (returnval){ [ 2000.569036] env[62684]: value = "task-2053043" [ 2000.569036] env[62684]: _type = "Task" [ 2000.569036] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2000.582827] env[62684]: DEBUG oslo_vmware.api [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': task-2053043, 'name': Rename_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.619270] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2000.620026] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2000.713094] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053042, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.985727] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0193b9f3-3430-4bb7-8138-69d3b9501bd6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.993838] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b0318b-4a3d-4388-959c-3c4ca3ddee39 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.024189] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549ca78c-9929-4a24-b8cf-413bb93fd61c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.031564] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-182fa15f-0905-49bd-ac77-38b404e4990e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.044427] env[62684]: DEBUG nova.compute.provider_tree [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2001.076280] env[62684]: DEBUG oslo_vmware.api [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': task-2053043, 'name': Rename_Task, 'duration_secs': 0.252104} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2001.076516] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2001.076763] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3a8d7329-8b62-432f-bc98-633e5ad5b547 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.082913] env[62684]: DEBUG oslo_vmware.api [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Waiting for the task: (returnval){ [ 2001.082913] env[62684]: value = "task-2053044" [ 2001.082913] env[62684]: _type = "Task" [ 2001.082913] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2001.090179] env[62684]: DEBUG oslo_vmware.api [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': task-2053044, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2001.209555] env[62684]: DEBUG oslo_vmware.api [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053042, 'name': PowerOnVM_Task, 'duration_secs': 0.663833} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2001.209822] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2001.210038] env[62684]: INFO nova.compute.manager [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Took 15.17 seconds to spawn the instance on the hypervisor. [ 2001.210231] env[62684]: DEBUG nova.compute.manager [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2001.210987] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-067f0fa6-74bb-46b4-b9f2-646de969196b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.548037] env[62684]: DEBUG nova.scheduler.client.report [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2001.592955] env[62684]: DEBUG oslo_vmware.api [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': task-2053044, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2001.728484] env[62684]: INFO nova.compute.manager [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Took 44.82 seconds to build instance. 
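The repeated "Invoking VirtualMachine.PowerOnVM_Task ... / Waiting for the task ... / progress is N%" triples above are the oslo.vmware session pattern: a vCenter task method is invoked through the session and the caller blocks in wait_for_task while a poller logs progress. A minimal sketch of that pattern in Python, with placeholder host, credentials and managed-object value (none of them taken from this log):

# Sketch only: power on one VM by managed-object reference, mirroring the
# invoke/wait pattern visible in the log entries above.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vc.example.test',             # placeholder vCenter host
    'user', 'password',            # placeholder credentials
    api_retry_count=10,
    task_poll_interval=0.5)        # interval behind the "progress is N%" lines

vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # assumed moref value
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)        # blocks until the task completes or faults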
[ 2002.053196] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.995s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2002.055452] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.399s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2002.055823] env[62684]: DEBUG nova.objects.instance [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lazy-loading 'resources' on Instance uuid a56a3fab-e491-44f5-9cf4-2c308138ffc4 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2002.096415] env[62684]: DEBUG oslo_vmware.api [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': task-2053044, 'name': PowerOnVM_Task, 'duration_secs': 0.791814} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2002.097345] env[62684]: INFO nova.network.neutron [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Updating port 8be48385-37eb-4c2e-baf8-404a9aad87de with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2002.099172] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2002.099366] env[62684]: INFO nova.compute.manager [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Took 8.48 seconds to spawn the instance on the hypervisor. 
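The 'Lock "compute_resources" acquired by ... waited 30.399s' and '"released" ... held 2.995s' entries above are emitted by oslo.concurrency's named-lock wrapper, which times how long a caller waited for and then held the semaphore. A minimal sketch assuming only oslo.concurrency; the lock name mirrors the log, but the claim function is a stand-in rather than Nova's ResourceTracker:

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid, vcpus, memory_mb):
    # Runs with the named lock held; lockutils logs the wait/hold durations
    # seen above when DEBUG logging is enabled.
    return {'instance': instance_uuid, 'vcpus': vcpus, 'memory_mb': memory_mb}

claim = instance_claim('b4cd871a-30ea-4b7a-98ad-00b8676dc2cd', 1, 192)

# The same named lock can also be taken explicitly as a context manager,
# e.g. around a read-modify-write of shared tracker state.
usage = {}
with lockutils.lock('compute_resources'):
    usage[claim['instance']] = claim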
[ 2002.099718] env[62684]: DEBUG nova.compute.manager [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2002.101300] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585d6d8c-46e9-46b2-a4b9-8145312f1113 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.230388] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53e4378b-bcca-4470-859d-67d58232eafc tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "6faeae10-c0bd-4297-b992-c05511fedb21" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.833s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2002.492342] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "6faeae10-c0bd-4297-b992-c05511fedb21" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2002.492698] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "6faeae10-c0bd-4297-b992-c05511fedb21" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2002.493185] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "6faeae10-c0bd-4297-b992-c05511fedb21-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2002.493267] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "6faeae10-c0bd-4297-b992-c05511fedb21-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2002.493409] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "6faeae10-c0bd-4297-b992-c05511fedb21-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2002.495491] env[62684]: INFO nova.compute.manager [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] 
Terminating instance [ 2002.497554] env[62684]: DEBUG nova.compute.manager [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2002.497836] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2002.498689] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f4117b7-2952-4bf4-b04e-268756c09d85 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.506976] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2002.507541] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1ce7a739-f74b-4ddb-aa3a-e950ba543a3f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.513020] env[62684]: DEBUG oslo_vmware.api [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 2002.513020] env[62684]: value = "task-2053045" [ 2002.513020] env[62684]: _type = "Task" [ 2002.513020] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2002.520804] env[62684]: DEBUG oslo_vmware.api [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053045, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2002.621604] env[62684]: INFO nova.compute.manager [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Took 42.16 seconds to build instance. 
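The terminate path above powers the VM off with a PowerOffVM_Task before it is unregistered and its datastore files are deleted. A hedged sketch of just that power-off-and-wait step, assuming an existing oslo.vmware session and a known managed-object value; treating InvalidPowerStateException as "already off" mirrors the "VM already powered off" handling seen further down in this log:

from oslo_vmware import exceptions as vexc
from oslo_vmware import vim_util


def power_off_vm(session, vm_moref_value):
    # Assumed helper: vm_moref_value is the vCenter MoRef value for the VM.
    vm_ref = vim_util.get_moref(vm_moref_value, 'VirtualMachine')
    try:
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)  # produces the PowerOffVM_Task progress lines
    except vexc.InvalidPowerStateException:
        # The VM was already powered off; the destroy flow can continue on
        # to unregistering the VM and deleting its datastore files.
        pass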
[ 2002.893220] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df752d91-063d-44f9-8a6d-56ccdab000e5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.901049] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5089b9ee-a36e-4263-8a74-3bd2e61b1e08 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.933015] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0393e8f-8736-4c86-bc38-b6b8990e627d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.940688] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc4075de-8ec5-4f6a-98d1-2ac32377ae2c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.955372] env[62684]: DEBUG nova.compute.provider_tree [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2003.024457] env[62684]: DEBUG oslo_vmware.api [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053045, 'name': PowerOffVM_Task, 'duration_secs': 0.220531} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2003.024762] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2003.024932] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2003.025323] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-74e3b8c2-80e7-47d6-ba9b-ed95870b0f74 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.123837] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d96a66-b9b0-41a2-a433-1669242da830 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Lock "548df581-073b-41d4-bcbe-df7342a2beca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.690s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2003.155366] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Unregistered the VM 
{{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2003.155582] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2003.155769] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Deleting the datastore file [datastore1] 6faeae10-c0bd-4297-b992-c05511fedb21 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2003.156055] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-00c5b11e-eecf-459e-956b-4908567d27f1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.162947] env[62684]: DEBUG oslo_vmware.api [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 2003.162947] env[62684]: value = "task-2053047" [ 2003.162947] env[62684]: _type = "Task" [ 2003.162947] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2003.170963] env[62684]: DEBUG oslo_vmware.api [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053047, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2003.228772] env[62684]: INFO nova.compute.manager [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Rescuing [ 2003.229102] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Acquiring lock "refresh_cache-548df581-073b-41d4-bcbe-df7342a2beca" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2003.230092] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Acquired lock "refresh_cache-548df581-073b-41d4-bcbe-df7342a2beca" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2003.230092] env[62684]: DEBUG nova.network.neutron [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2003.461025] env[62684]: DEBUG nova.scheduler.client.report [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2003.491146] env[62684]: DEBUG nova.compute.manager [req-57126a45-4c3b-4f6a-9c5e-ad373d02fd61 req-f1b40770-235e-4c3a-b843-9e2b5d977bea service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Received event network-vif-plugged-8be48385-37eb-4c2e-baf8-404a9aad87de {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2003.491146] env[62684]: DEBUG oslo_concurrency.lockutils [req-57126a45-4c3b-4f6a-9c5e-ad373d02fd61 req-f1b40770-235e-4c3a-b843-9e2b5d977bea service nova] Acquiring lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2003.491522] env[62684]: DEBUG oslo_concurrency.lockutils [req-57126a45-4c3b-4f6a-9c5e-ad373d02fd61 req-f1b40770-235e-4c3a-b843-9e2b5d977bea service nova] Lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2003.491867] env[62684]: DEBUG oslo_concurrency.lockutils [req-57126a45-4c3b-4f6a-9c5e-ad373d02fd61 
req-f1b40770-235e-4c3a-b843-9e2b5d977bea service nova] Lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2003.492193] env[62684]: DEBUG nova.compute.manager [req-57126a45-4c3b-4f6a-9c5e-ad373d02fd61 req-f1b40770-235e-4c3a-b843-9e2b5d977bea service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] No waiting events found dispatching network-vif-plugged-8be48385-37eb-4c2e-baf8-404a9aad87de {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2003.492505] env[62684]: WARNING nova.compute.manager [req-57126a45-4c3b-4f6a-9c5e-ad373d02fd61 req-f1b40770-235e-4c3a-b843-9e2b5d977bea service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Received unexpected event network-vif-plugged-8be48385-37eb-4c2e-baf8-404a9aad87de for instance with vm_state shelved_offloaded and task_state spawning. [ 2003.585942] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "refresh_cache-b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2003.585942] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquired lock "refresh_cache-b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2003.585942] env[62684]: DEBUG nova.network.neutron [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2003.674404] env[62684]: DEBUG oslo_vmware.api [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053047, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.130569} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2003.675066] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2003.675066] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2003.675188] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2003.675336] env[62684]: INFO nova.compute.manager [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Took 1.18 seconds to destroy the instance on the hypervisor. [ 2003.675575] env[62684]: DEBUG oslo.service.loopingcall [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2003.675768] env[62684]: DEBUG nova.compute.manager [-] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2003.675862] env[62684]: DEBUG nova.network.neutron [-] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2003.964133] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.909s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2003.968695] env[62684]: DEBUG nova.network.neutron [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Updating instance_info_cache with network_info: [{"id": "f3a330d0-ed4d-48e9-956c-1f6ee2137ea2", "address": "fa:16:3e:ba:1c:e2", "network": {"id": "944b4fdf-4610-4ecc-acbb-a4943f324b5d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1439448096-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": 
"c64d2a551d44429ca54f58862c64fe9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3a330d0-ed", "ovs_interfaceid": "f3a330d0-ed4d-48e9-956c-1f6ee2137ea2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2003.969943] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.026s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2003.971934] env[62684]: INFO nova.compute.claims [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2003.989197] env[62684]: INFO nova.scheduler.client.report [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Deleted allocations for instance a56a3fab-e491-44f5-9cf4-2c308138ffc4 [ 2004.421357] env[62684]: DEBUG nova.network.neutron [-] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2004.429419] env[62684]: DEBUG nova.network.neutron [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Updating instance_info_cache with network_info: [{"id": "8be48385-37eb-4c2e-baf8-404a9aad87de", "address": "fa:16:3e:33:7c:80", "network": {"id": "64494ea7-f6d9-430c-8ac7-e876e763004b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2056829508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e57b232a7e7647c7a3b2bca3c096feb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8be48385-37", "ovs_interfaceid": "8be48385-37eb-4c2e-baf8-404a9aad87de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2004.478708] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Releasing lock "refresh_cache-548df581-073b-41d4-bcbe-df7342a2beca" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2004.499103] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5a92802f-7ad8-408c-9c51-78ee6b99a4cf tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "a56a3fab-e491-44f5-9cf4-2c308138ffc4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.449s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2004.924262] env[62684]: INFO nova.compute.manager [-] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Took 1.25 seconds to deallocate network for instance. [ 2004.932720] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Releasing lock "refresh_cache-b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2004.955883] env[62684]: DEBUG nova.virt.hardware [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='4fdc87ffb0827bfaedbd6bb1f8b9d4ac',container_format='bare',created_at=2025-01-10T07:48:58Z,direct_url=,disk_format='vmdk',id=2116318e-3b46-4c4b-83f6-3ab3a26c5100,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1805977318-shelved',owner='e57b232a7e7647c7a3b2bca3c096feb7',properties=ImageMetaProps,protected=,size=31668224,status='active',tags=,updated_at=2025-01-10T07:49:14Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2004.956182] env[62684]: DEBUG nova.virt.hardware [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2004.956355] env[62684]: DEBUG nova.virt.hardware [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2004.956577] env[62684]: DEBUG nova.virt.hardware [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2004.956751] env[62684]: 
DEBUG nova.virt.hardware [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2004.956909] env[62684]: DEBUG nova.virt.hardware [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2004.957144] env[62684]: DEBUG nova.virt.hardware [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2004.957317] env[62684]: DEBUG nova.virt.hardware [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2004.957500] env[62684]: DEBUG nova.virt.hardware [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2004.957721] env[62684]: DEBUG nova.virt.hardware [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2004.957921] env[62684]: DEBUG nova.virt.hardware [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2004.958812] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b69fc9b3-c1bb-4f8a-bbb7-0731ee891852 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.967930] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e78aba54-8e49-4123-8ea5-ee2a6400f00d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.982214] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:7c:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6966f473-59ac-49bb-9b7a-22c61f4e61e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'8be48385-37eb-4c2e-baf8-404a9aad87de', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2004.989519] env[62684]: DEBUG oslo.service.loopingcall [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2004.992251] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2004.992671] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-95658cb4-43b1-4b74-9244-5d99b904cfc8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.012518] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2005.012607] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1f8fd3bf-82eb-4d34-9376-f45dc48bc954 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.016631] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2005.016631] env[62684]: value = "task-2053048" [ 2005.016631] env[62684]: _type = "Task" [ 2005.016631] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2005.020338] env[62684]: DEBUG oslo_vmware.api [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Waiting for the task: (returnval){ [ 2005.020338] env[62684]: value = "task-2053049" [ 2005.020338] env[62684]: _type = "Task" [ 2005.020338] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2005.029366] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053048, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2005.035613] env[62684]: DEBUG oslo_vmware.api [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': task-2053049, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2005.193951] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "refresh_cache-dcb0a5b2-379e-44ff-a9b0-be615943c94e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2005.194120] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "refresh_cache-dcb0a5b2-379e-44ff-a9b0-be615943c94e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2005.194275] env[62684]: DEBUG nova.network.neutron [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Forcefully refreshing network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2005.233235] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "a1b7c2a7-f21d-41f4-9102-e656b8205e1f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2005.233472] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "a1b7c2a7-f21d-41f4-9102-e656b8205e1f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2005.325408] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09cd2fab-e494-45cf-aa69-5cae79d6de88 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.332393] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2811f6ed-d1f5-44bf-a2fb-d2781501c3cb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.361666] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a777af4-6289-42d4-90dd-49efdd5a8dcf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.368946] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58a241b0-ce26-4f9a-a196-3d04d949c6f2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.382026] env[62684]: DEBUG nova.compute.provider_tree [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2005.431540] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 
tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2005.517613] env[62684]: DEBUG nova.compute.manager [req-5b6be3e7-f546-4eff-bbc6-9f8418038113 req-482b0fb5-514c-4c0b-b99d-278c72b9d403 service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Received event network-changed-8be48385-37eb-4c2e-baf8-404a9aad87de {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2005.517856] env[62684]: DEBUG nova.compute.manager [req-5b6be3e7-f546-4eff-bbc6-9f8418038113 req-482b0fb5-514c-4c0b-b99d-278c72b9d403 service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Refreshing instance network info cache due to event network-changed-8be48385-37eb-4c2e-baf8-404a9aad87de. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2005.518102] env[62684]: DEBUG oslo_concurrency.lockutils [req-5b6be3e7-f546-4eff-bbc6-9f8418038113 req-482b0fb5-514c-4c0b-b99d-278c72b9d403 service nova] Acquiring lock "refresh_cache-b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2005.518254] env[62684]: DEBUG oslo_concurrency.lockutils [req-5b6be3e7-f546-4eff-bbc6-9f8418038113 req-482b0fb5-514c-4c0b-b99d-278c72b9d403 service nova] Acquired lock "refresh_cache-b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2005.518831] env[62684]: DEBUG nova.network.neutron [req-5b6be3e7-f546-4eff-bbc6-9f8418038113 req-482b0fb5-514c-4c0b-b99d-278c72b9d403 service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Refreshing network info cache for port 8be48385-37eb-4c2e-baf8-404a9aad87de {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2005.532469] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053048, 'name': CreateVM_Task, 'duration_secs': 0.494441} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2005.535173] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2005.535679] env[62684]: DEBUG oslo_vmware.api [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': task-2053049, 'name': PowerOffVM_Task, 'duration_secs': 0.182018} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2005.536299] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2116318e-3b46-4c4b-83f6-3ab3a26c5100" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2005.536466] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2116318e-3b46-4c4b-83f6-3ab3a26c5100" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2005.536849] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2116318e-3b46-4c4b-83f6-3ab3a26c5100" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2005.537138] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2005.537607] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f60e2e4-aa38-4cd3-910f-dc2bcb51a782 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.539880] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5262124-d616-4986-b3c4-45ae112d8589 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.545339] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2005.545339] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521b605e-3e76-5b19-4b81-11b218ddc660" [ 2005.545339] env[62684]: _type = "Task" [ 2005.545339] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2005.563088] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ecdb7be-2bac-4843-9a30-e33f14e10f3d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.572313] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2116318e-3b46-4c4b-83f6-3ab3a26c5100" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2005.572563] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Processing image 2116318e-3b46-4c4b-83f6-3ab3a26c5100 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2005.572812] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2116318e-3b46-4c4b-83f6-3ab3a26c5100/2116318e-3b46-4c4b-83f6-3ab3a26c5100.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2005.572968] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2116318e-3b46-4c4b-83f6-3ab3a26c5100/2116318e-3b46-4c4b-83f6-3ab3a26c5100.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2005.573164] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2005.575095] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dea19779-2a45-470e-a9e2-5eef835456c8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.582127] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2005.582313] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2005.582979] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9909fbf1-c4f6-465b-9383-a411da67919c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.588298] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2005.588298] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52108f9e-53e4-ff92-787e-57ea6037330e" [ 2005.588298] env[62684]: _type = "Task" [ 2005.588298] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2005.597473] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52108f9e-53e4-ff92-787e-57ea6037330e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2005.599224] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2005.599453] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-331e9353-0b51-49cf-bfce-85f6406f51d9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.605573] env[62684]: DEBUG oslo_vmware.api [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Waiting for the task: (returnval){ [ 2005.605573] env[62684]: value = "task-2053050" [ 2005.605573] env[62684]: _type = "Task" [ 2005.605573] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2005.612527] env[62684]: DEBUG oslo_vmware.api [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': task-2053050, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2005.735846] env[62684]: DEBUG nova.compute.manager [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2005.884914] env[62684]: DEBUG nova.scheduler.client.report [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2006.100502] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Preparing fetch location {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2006.100779] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Fetch image to [datastore2] OSTACK_IMG_bb989171-195a-4723-921d-8bb2b49ecd24/OSTACK_IMG_bb989171-195a-4723-921d-8bb2b49ecd24.vmdk {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2006.100968] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Downloading stream optimized image 2116318e-3b46-4c4b-83f6-3ab3a26c5100 to [datastore2] OSTACK_IMG_bb989171-195a-4723-921d-8bb2b49ecd24/OSTACK_IMG_bb989171-195a-4723-921d-8bb2b49ecd24.vmdk on the data store datastore2 as vApp {{(pid=62684) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2006.101168] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Downloading image file data 2116318e-3b46-4c4b-83f6-3ab3a26c5100 to the ESX as VM named 'OSTACK_IMG_bb989171-195a-4723-921d-8bb2b49ecd24' {{(pid=62684) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2006.114563] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] VM already powered off {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2006.114819] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2006.115086] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2006.115247] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2006.115565] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2006.117250] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0ef8e608-b09d-4b60-8c7d-224868fcfa59 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.143866] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2006.143866] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2006.144063] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f930c39-658c-4b4e-9ffa-07ea89d50d5b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.150779] env[62684]: DEBUG oslo_vmware.api [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Waiting for the task: (returnval){ [ 2006.150779] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f1da79-d696-9336-e07e-2a6a6ff518aa" [ 2006.150779] env[62684]: _type = "Task" [ 2006.150779] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2006.160942] env[62684]: DEBUG oslo_vmware.api [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f1da79-d696-9336-e07e-2a6a6ff518aa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2006.175247] env[62684]: DEBUG oslo_vmware.rw_handles [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2006.175247] env[62684]: value = "resgroup-9" [ 2006.175247] env[62684]: _type = "ResourcePool" [ 2006.175247] env[62684]: }. {{(pid=62684) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2006.175503] env[62684]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-a37f56f7-b5e3-4570-813d-a659c286338a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.195701] env[62684]: DEBUG oslo_vmware.rw_handles [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lease: (returnval){ [ 2006.195701] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527a9153-a951-e8e2-1bc4-ba7168629c5d" [ 2006.195701] env[62684]: _type = "HttpNfcLease" [ 2006.195701] env[62684]: } obtained for vApp import into resource pool (val){ [ 2006.195701] env[62684]: value = "resgroup-9" [ 2006.195701] env[62684]: _type = "ResourcePool" [ 2006.195701] env[62684]: }. {{(pid=62684) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2006.196017] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the lease: (returnval){ [ 2006.196017] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527a9153-a951-e8e2-1bc4-ba7168629c5d" [ 2006.196017] env[62684]: _type = "HttpNfcLease" [ 2006.196017] env[62684]: } to be ready. {{(pid=62684) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2006.202125] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2006.202125] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527a9153-a951-e8e2-1bc4-ba7168629c5d" [ 2006.202125] env[62684]: _type = "HttpNfcLease" [ 2006.202125] env[62684]: } is initializing. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2006.271305] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2006.334795] env[62684]: DEBUG nova.network.neutron [req-5b6be3e7-f546-4eff-bbc6-9f8418038113 req-482b0fb5-514c-4c0b-b99d-278c72b9d403 service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Updated VIF entry in instance network info cache for port 8be48385-37eb-4c2e-baf8-404a9aad87de. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2006.335232] env[62684]: DEBUG nova.network.neutron [req-5b6be3e7-f546-4eff-bbc6-9f8418038113 req-482b0fb5-514c-4c0b-b99d-278c72b9d403 service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Updating instance_info_cache with network_info: [{"id": "8be48385-37eb-4c2e-baf8-404a9aad87de", "address": "fa:16:3e:33:7c:80", "network": {"id": "64494ea7-f6d9-430c-8ac7-e876e763004b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2056829508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e57b232a7e7647c7a3b2bca3c096feb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8be48385-37", "ovs_interfaceid": "8be48385-37eb-4c2e-baf8-404a9aad87de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2006.392473] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.422s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2006.393069] env[62684]: DEBUG nova.compute.manager [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2006.398368] env[62684]: DEBUG oslo_concurrency.lockutils [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 27.767s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2006.436096] env[62684]: DEBUG nova.network.neutron [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Updating instance_info_cache with network_info: [{"id": "617e9c54-b56e-4945-b890-de6be33b657b", "address": "fa:16:3e:11:51:36", "network": {"id": "bca0ee43-bbb1-483b-9d82-56955369f9b7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1592250106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aef5d7061c834332b9f9c5c75596bf08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bfa7abe-7e46-4d8f-b50a-4d0c4509e4dc", "external-id": "nsx-vlan-transportzone-951", "segmentation_id": 951, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap617e9c54-b5", "ovs_interfaceid": "617e9c54-b56e-4945-b890-de6be33b657b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2006.660861] env[62684]: DEBUG oslo_vmware.api [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f1da79-d696-9336-e07e-2a6a6ff518aa, 'name': SearchDatastore_Task, 'duration_secs': 0.010821} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2006.662045] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3815b7ff-5676-445e-9f39-f33eb7f4da67 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.667098] env[62684]: DEBUG oslo_vmware.api [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Waiting for the task: (returnval){ [ 2006.667098] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52639464-d12c-8dd3-9b9d-771119ee9951" [ 2006.667098] env[62684]: _type = "Task" [ 2006.667098] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2006.674661] env[62684]: DEBUG oslo_vmware.api [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52639464-d12c-8dd3-9b9d-771119ee9951, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2006.703346] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2006.703346] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527a9153-a951-e8e2-1bc4-ba7168629c5d" [ 2006.703346] env[62684]: _type = "HttpNfcLease" [ 2006.703346] env[62684]: } is initializing. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2006.837930] env[62684]: DEBUG oslo_concurrency.lockutils [req-5b6be3e7-f546-4eff-bbc6-9f8418038113 req-482b0fb5-514c-4c0b-b99d-278c72b9d403 service nova] Releasing lock "refresh_cache-b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2006.838313] env[62684]: DEBUG nova.compute.manager [req-5b6be3e7-f546-4eff-bbc6-9f8418038113 req-482b0fb5-514c-4c0b-b99d-278c72b9d403 service nova] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Received event network-vif-deleted-e41f6f98-c890-458a-b130-50e41463c0e4 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2006.902243] env[62684]: DEBUG nova.compute.utils [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2006.903873] env[62684]: DEBUG nova.objects.instance [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lazy-loading 'migration_context' on Instance uuid 02dc8c41-5092-4f84-9722-37d4df3a459a {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2006.905042] env[62684]: DEBUG nova.compute.manager [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2006.905217] env[62684]: DEBUG nova.network.neutron [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2006.938380] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "refresh_cache-dcb0a5b2-379e-44ff-a9b0-be615943c94e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2006.938604] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Updated the network info_cache for instance {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2006.938835] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2006.939034] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2006.939194] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2006.939345] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2006.939486] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2006.939631] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2006.939758] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2006.939901] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2006.944022] env[62684]: DEBUG nova.policy [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e957449ae9d24bdaba38b3db704d3d61', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5cb4900a999e467bafdfd1fb407a82f4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2007.182553] env[62684]: DEBUG oslo_vmware.api [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52639464-d12c-8dd3-9b9d-771119ee9951, 'name': SearchDatastore_Task, 'duration_secs': 0.009261} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2007.182859] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2007.183152] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 548df581-073b-41d4-bcbe-df7342a2beca/3931321c-cb4c-4b87-8d3a-50e05ea01db2-rescue.vmdk. {{(pid=62684) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 2007.183444] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8d9e4dae-9a44-4a05-84f2-460494114c04 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.190837] env[62684]: DEBUG oslo_vmware.api [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Waiting for the task: (returnval){ [ 2007.190837] env[62684]: value = "task-2053052" [ 2007.190837] env[62684]: _type = "Task" [ 2007.190837] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.199103] env[62684]: DEBUG oslo_vmware.api [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': task-2053052, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.205286] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2007.205286] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527a9153-a951-e8e2-1bc4-ba7168629c5d" [ 2007.205286] env[62684]: _type = "HttpNfcLease" [ 2007.205286] env[62684]: } is ready. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2007.205567] env[62684]: DEBUG oslo_vmware.rw_handles [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2007.205567] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527a9153-a951-e8e2-1bc4-ba7168629c5d" [ 2007.205567] env[62684]: _type = "HttpNfcLease" [ 2007.205567] env[62684]: }. {{(pid=62684) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2007.206304] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de62476-e9bd-40cf-9868-2376e3660a71 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.214607] env[62684]: DEBUG oslo_vmware.rw_handles [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525388d5-c3e9-4df5-ea1a-c4c80a199a87/disk-0.vmdk from lease info. {{(pid=62684) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2007.215668] env[62684]: DEBUG oslo_vmware.rw_handles [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Creating HTTP connection to write to file with size = 31668224 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525388d5-c3e9-4df5-ea1a-c4c80a199a87/disk-0.vmdk. 
{{(pid=62684) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2007.216765] env[62684]: DEBUG nova.network.neutron [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Successfully created port: fafc2062-9754-4ce0-8647-362b6bb8f8d7 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2007.289056] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-eb5d77f9-9b17-4dd4-b4c7-32cd125f55b3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.405836] env[62684]: DEBUG nova.compute.manager [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2007.443453] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2007.704978] env[62684]: DEBUG oslo_vmware.api [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': task-2053052, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.511448} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2007.706749] env[62684]: INFO nova.virt.vmwareapi.ds_util [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 548df581-073b-41d4-bcbe-df7342a2beca/3931321c-cb4c-4b87-8d3a-50e05ea01db2-rescue.vmdk. 
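Note on the sequence above: the CopyVirtualDisk_Task entries follow oslo.vmware's generic invoke-then-poll flow, where the driver starts an asynchronous vSphere task and the session poller (the "_poll_task ... progress is N%" lines) drives it to completion. A minimal sketch of that flow is given below; the vCenter endpoint, credentials, datacenter moref and datastore paths are illustrative placeholders, not values taken from this log.

    # Hedged sketch of the invoke-then-wait pattern behind the
    # CopyVirtualDisk_Task entries above. All connection details, morefs and
    # datastore paths here are placeholders, not values from this log.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vcenter.example.org',   # hypothetical vCenter endpoint
        'svc-nova',              # hypothetical service user
        'secret',                # hypothetical password
        api_retry_count=10,
        task_poll_interval=0.5)

    vdm = session.vim.service_content.virtualDiskManager
    dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')  # placeholder moref

    # Start the asynchronous copy; the call returns a Task managed object.
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', vdm,
        sourceName='[datastore1] devstack-image-cache_base/<image>.vmdk',
        sourceDatacenter=dc_ref,
        destName='[datastore1] <instance-uuid>/<image>-rescue.vmdk',
        destDatacenter=dc_ref)

    # wait_for_task() polls the task until it completes and raises if
    # vCenter reports an error for it.
    session.wait_for_task(task)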
[ 2007.710456] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2689586-8c13-4adb-b30e-52c4d44936e0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.739394] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] 548df581-073b-41d4-bcbe-df7342a2beca/3931321c-cb4c-4b87-8d3a-50e05ea01db2-rescue.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2007.746066] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9abd898e-d572-4346-94fa-866e34f88c77 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.774394] env[62684]: DEBUG oslo_vmware.api [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Waiting for the task: (returnval){ [ 2007.774394] env[62684]: value = "task-2053053" [ 2007.774394] env[62684]: _type = "Task" [ 2007.774394] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.788214] env[62684]: DEBUG oslo_vmware.api [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': task-2053053, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.870217] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93d0b9c-b95f-4aa5-8741-419cb35f2c38 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.880827] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c576486d-3885-4ef5-8fb9-d12ca608e816 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.922240] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f417e6e7-06ee-4320-a66a-5bfcca52c985 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.931507] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11e08d29-18bf-4596-88d9-e9962a76ff8b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.947075] env[62684]: DEBUG nova.compute.provider_tree [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2008.284061] env[62684]: DEBUG oslo_vmware.api [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': task-2053053, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.427077] env[62684]: DEBUG nova.compute.manager [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2008.450399] env[62684]: DEBUG nova.scheduler.client.report [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2008.525631] env[62684]: DEBUG nova.virt.hardware [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2008.525849] env[62684]: DEBUG nova.virt.hardware [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2008.525928] env[62684]: DEBUG nova.virt.hardware [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2008.526099] env[62684]: DEBUG nova.virt.hardware [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2008.526270] env[62684]: DEBUG nova.virt.hardware [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2008.526424] env[62684]: DEBUG nova.virt.hardware [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2008.526840] env[62684]: 
DEBUG nova.virt.hardware [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2008.526927] env[62684]: DEBUG nova.virt.hardware [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2008.527072] env[62684]: DEBUG nova.virt.hardware [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2008.527261] env[62684]: DEBUG nova.virt.hardware [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2008.527484] env[62684]: DEBUG nova.virt.hardware [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2008.528438] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-470bd207-7dae-4fd9-9acb-1d90eb2ab08f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.536644] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420828c8-d989-4a2d-a693-17878c171e1e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.686201] env[62684]: DEBUG nova.compute.manager [req-4e6f791c-0ad2-475d-8165-b85836119e04 req-bf056256-4623-4c87-a87c-24404406b96a service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Received event network-vif-plugged-fafc2062-9754-4ce0-8647-362b6bb8f8d7 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2008.686517] env[62684]: DEBUG oslo_concurrency.lockutils [req-4e6f791c-0ad2-475d-8165-b85836119e04 req-bf056256-4623-4c87-a87c-24404406b96a service nova] Acquiring lock "81b7949d-be24-46c9-8dc8-c249b65bb039-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2008.686700] env[62684]: DEBUG oslo_concurrency.lockutils [req-4e6f791c-0ad2-475d-8165-b85836119e04 req-bf056256-4623-4c87-a87c-24404406b96a service nova] Lock "81b7949d-be24-46c9-8dc8-c249b65bb039-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2008.686946] env[62684]: DEBUG oslo_concurrency.lockutils 
[req-4e6f791c-0ad2-475d-8165-b85836119e04 req-bf056256-4623-4c87-a87c-24404406b96a service nova] Lock "81b7949d-be24-46c9-8dc8-c249b65bb039-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2008.687151] env[62684]: DEBUG nova.compute.manager [req-4e6f791c-0ad2-475d-8165-b85836119e04 req-bf056256-4623-4c87-a87c-24404406b96a service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] No waiting events found dispatching network-vif-plugged-fafc2062-9754-4ce0-8647-362b6bb8f8d7 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2008.687363] env[62684]: WARNING nova.compute.manager [req-4e6f791c-0ad2-475d-8165-b85836119e04 req-bf056256-4623-4c87-a87c-24404406b96a service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Received unexpected event network-vif-plugged-fafc2062-9754-4ce0-8647-362b6bb8f8d7 for instance with vm_state building and task_state spawning. [ 2008.785780] env[62684]: DEBUG oslo_vmware.api [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': task-2053053, 'name': ReconfigVM_Task, 'duration_secs': 0.718234} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2008.785780] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Reconfigured VM instance instance-00000041 to attach disk [datastore1] 548df581-073b-41d4-bcbe-df7342a2beca/3931321c-cb4c-4b87-8d3a-50e05ea01db2-rescue.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2008.786022] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d52091b-0a3e-44fc-ba27-5abfb8a18bb4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.790645] env[62684]: DEBUG nova.network.neutron [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Successfully updated port: fafc2062-9754-4ce0-8647-362b6bb8f8d7 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2008.815560] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "refresh_cache-81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2008.815688] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "refresh_cache-81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2008.815741] env[62684]: DEBUG nova.network.neutron [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 
tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2008.820275] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1edc2eef-0eaf-4e11-a8ff-054138d0f327 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.842530] env[62684]: DEBUG oslo_vmware.api [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Waiting for the task: (returnval){ [ 2008.842530] env[62684]: value = "task-2053054" [ 2008.842530] env[62684]: _type = "Task" [ 2008.842530] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2008.855193] env[62684]: DEBUG oslo_vmware.api [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': task-2053054, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.899550] env[62684]: DEBUG oslo_vmware.rw_handles [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Completed reading data from the image iterator. {{(pid=62684) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2008.899910] env[62684]: DEBUG oslo_vmware.rw_handles [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525388d5-c3e9-4df5-ea1a-c4c80a199a87/disk-0.vmdk. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2008.901142] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a05e7b81-06ff-4ab5-b57f-6424f4e715ca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.909177] env[62684]: DEBUG oslo_vmware.rw_handles [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525388d5-c3e9-4df5-ea1a-c4c80a199a87/disk-0.vmdk is in state: ready. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2008.909367] env[62684]: DEBUG oslo_vmware.rw_handles [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525388d5-c3e9-4df5-ea1a-c4c80a199a87/disk-0.vmdk. 
{{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2008.909638] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-38959d8d-8074-4253-b475-d73df0a0022b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.923271] env[62684]: DEBUG nova.network.neutron [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2009.083572] env[62684]: DEBUG nova.network.neutron [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Updating instance_info_cache with network_info: [{"id": "fafc2062-9754-4ce0-8647-362b6bb8f8d7", "address": "fa:16:3e:a1:7d:89", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfafc2062-97", "ovs_interfaceid": "fafc2062-9754-4ce0-8647-362b6bb8f8d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2009.104184] env[62684]: DEBUG oslo_vmware.rw_handles [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525388d5-c3e9-4df5-ea1a-c4c80a199a87/disk-0.vmdk. 
{{(pid=62684) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2009.104392] env[62684]: INFO nova.virt.vmwareapi.images [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Downloaded image file data 2116318e-3b46-4c4b-83f6-3ab3a26c5100 [ 2009.105609] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b48e72fc-bc29-4937-9da8-b783810f76e5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.122151] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dab830c1-f604-4750-9a5a-f059788cf3cd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.149956] env[62684]: INFO nova.virt.vmwareapi.images [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] The imported VM was unregistered [ 2009.152175] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Caching image {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2009.152449] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Creating directory with path [datastore2] devstack-image-cache_base/2116318e-3b46-4c4b-83f6-3ab3a26c5100 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2009.152718] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f9f34c1c-b7f4-4bb3-bba7-37563466cc74 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.163350] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Created directory with path [datastore2] devstack-image-cache_base/2116318e-3b46-4c4b-83f6-3ab3a26c5100 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2009.163512] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_bb989171-195a-4723-921d-8bb2b49ecd24/OSTACK_IMG_bb989171-195a-4723-921d-8bb2b49ecd24.vmdk to [datastore2] devstack-image-cache_base/2116318e-3b46-4c4b-83f6-3ab3a26c5100/2116318e-3b46-4c4b-83f6-3ab3a26c5100.vmdk. 
{{(pid=62684) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2009.163770] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-bd2a69b1-b325-4530-91e5-03ecdf4ca1fb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.170202] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2009.170202] env[62684]: value = "task-2053056" [ 2009.170202] env[62684]: _type = "Task" [ 2009.170202] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.177967] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053056, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.354064] env[62684]: DEBUG oslo_vmware.api [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': task-2053054, 'name': ReconfigVM_Task, 'duration_secs': 0.247083} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.354309] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2009.354442] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9cfbcab2-6fea-4295-ac33-287710efc894 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.364358] env[62684]: DEBUG oslo_vmware.api [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Waiting for the task: (returnval){ [ 2009.364358] env[62684]: value = "task-2053057" [ 2009.364358] env[62684]: _type = "Task" [ 2009.364358] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.374786] env[62684]: DEBUG oslo_vmware.api [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': task-2053057, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.463302] env[62684]: DEBUG oslo_concurrency.lockutils [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.065s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2009.470214] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.076s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2009.471918] env[62684]: INFO nova.compute.claims [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2009.585973] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "refresh_cache-81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2009.586377] env[62684]: DEBUG nova.compute.manager [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Instance network_info: |[{"id": "fafc2062-9754-4ce0-8647-362b6bb8f8d7", "address": "fa:16:3e:a1:7d:89", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfafc2062-97", "ovs_interfaceid": "fafc2062-9754-4ce0-8647-362b6bb8f8d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2009.586911] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:7d:89', 
'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f880ac2e-d532-4f54-87bb-998a8d1bca78', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fafc2062-9754-4ce0-8647-362b6bb8f8d7', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2009.594603] env[62684]: DEBUG oslo.service.loopingcall [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2009.595277] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2009.595535] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-401ef38c-5714-4f34-bfa6-dc7830e0030b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.615607] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2009.615607] env[62684]: value = "task-2053058" [ 2009.615607] env[62684]: _type = "Task" [ 2009.615607] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.624540] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053058, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.681119] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053056, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.875765] env[62684]: DEBUG oslo_vmware.api [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': task-2053057, 'name': PowerOnVM_Task, 'duration_secs': 0.461206} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.876117] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2009.880685] env[62684]: DEBUG nova.compute.manager [None req-8a2882a1-6c11-45ac-ae9d-5746c3efe8f5 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2009.880685] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a39fc1-63ac-4eec-b835-71b8a13bc5be {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.128208] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053058, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.182116] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053056, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.630188] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053058, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.683265] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053056, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.715827] env[62684]: DEBUG nova.compute.manager [req-c456c71f-97fa-41ab-8efd-1edd8d65cbea req-09d71e3a-7821-4712-9910-ba838b8cfb30 service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Received event network-changed-fafc2062-9754-4ce0-8647-362b6bb8f8d7 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2010.716070] env[62684]: DEBUG nova.compute.manager [req-c456c71f-97fa-41ab-8efd-1edd8d65cbea req-09d71e3a-7821-4712-9910-ba838b8cfb30 service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Refreshing instance network info cache due to event network-changed-fafc2062-9754-4ce0-8647-362b6bb8f8d7. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2010.716295] env[62684]: DEBUG oslo_concurrency.lockutils [req-c456c71f-97fa-41ab-8efd-1edd8d65cbea req-09d71e3a-7821-4712-9910-ba838b8cfb30 service nova] Acquiring lock "refresh_cache-81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2010.716446] env[62684]: DEBUG oslo_concurrency.lockutils [req-c456c71f-97fa-41ab-8efd-1edd8d65cbea req-09d71e3a-7821-4712-9910-ba838b8cfb30 service nova] Acquired lock "refresh_cache-81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2010.716673] env[62684]: DEBUG nova.network.neutron [req-c456c71f-97fa-41ab-8efd-1edd8d65cbea req-09d71e3a-7821-4712-9910-ba838b8cfb30 service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Refreshing network info cache for port fafc2062-9754-4ce0-8647-362b6bb8f8d7 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2010.861445] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-357c6f87-f6e7-410c-a373-b482c4036e93 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.870343] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca08aa6-6f21-4724-8248-14d2818af268 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.904201] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a4d6ca-821b-4b18-85ff-38461c1e89a5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.912870] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c30ff90-71ec-4881-b84c-df827c2b48ef {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.927954] env[62684]: DEBUG nova.compute.provider_tree [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2011.011398] env[62684]: INFO nova.compute.manager [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Swapping old allocation on dict_keys(['c23c281e-ec1f-4876-972e-a98655f2084f']) held by migration 079cb72a-143d-49f7-91bd-12a1ad5c9e3e for instance [ 2011.044041] env[62684]: DEBUG nova.scheduler.client.report [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Overwriting current allocation {'allocations': {'c23c281e-ec1f-4876-972e-a98655f2084f': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 96}}, 'project_id': '147d85277da2482db0c24803c664cb93', 'user_id': '6cc355a545ee470d8082f0a96dafe513', 'consumer_generation': 1} on consumer 02dc8c41-5092-4f84-9722-37d4df3a459a {{(pid=62684) move_allocations 
/opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 2011.128685] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053058, 'name': CreateVM_Task, 'duration_secs': 1.275806} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.128685] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2011.131394] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2011.131394] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2011.131394] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2011.131394] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-920822b7-105d-4747-9b04-f4d73883aeed {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.136706] env[62684]: DEBUG oslo_vmware.api [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2011.136706] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52618d11-c67d-5ced-26f8-ec87904b0c05" [ 2011.136706] env[62684]: _type = "Task" [ 2011.136706] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.146665] env[62684]: DEBUG oslo_vmware.api [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52618d11-c67d-5ced-26f8-ec87904b0c05, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.160643] env[62684]: DEBUG oslo_concurrency.lockutils [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "refresh_cache-02dc8c41-5092-4f84-9722-37d4df3a459a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2011.160850] env[62684]: DEBUG oslo_concurrency.lockutils [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquired lock "refresh_cache-02dc8c41-5092-4f84-9722-37d4df3a459a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2011.161140] env[62684]: DEBUG nova.network.neutron [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2011.182552] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053056, 'name': MoveVirtualDisk_Task} progress is 85%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.431493] env[62684]: DEBUG nova.scheduler.client.report [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2011.444637] env[62684]: DEBUG nova.network.neutron [req-c456c71f-97fa-41ab-8efd-1edd8d65cbea req-09d71e3a-7821-4712-9910-ba838b8cfb30 service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Updated VIF entry in instance network info cache for port fafc2062-9754-4ce0-8647-362b6bb8f8d7. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2011.445057] env[62684]: DEBUG nova.network.neutron [req-c456c71f-97fa-41ab-8efd-1edd8d65cbea req-09d71e3a-7821-4712-9910-ba838b8cfb30 service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Updating instance_info_cache with network_info: [{"id": "fafc2062-9754-4ce0-8647-362b6bb8f8d7", "address": "fa:16:3e:a1:7d:89", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfafc2062-97", "ovs_interfaceid": "fafc2062-9754-4ce0-8647-362b6bb8f8d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2011.523458] env[62684]: DEBUG nova.compute.manager [req-efa67800-f8a5-4c71-a66c-93dfe4d9571d req-3dc145fe-2492-49f0-b56b-65dc87f7b0d2 service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Received event network-changed-f3a330d0-ed4d-48e9-956c-1f6ee2137ea2 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2011.523631] env[62684]: DEBUG nova.compute.manager [req-efa67800-f8a5-4c71-a66c-93dfe4d9571d req-3dc145fe-2492-49f0-b56b-65dc87f7b0d2 service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Refreshing instance network info cache due to event network-changed-f3a330d0-ed4d-48e9-956c-1f6ee2137ea2. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2011.524085] env[62684]: DEBUG oslo_concurrency.lockutils [req-efa67800-f8a5-4c71-a66c-93dfe4d9571d req-3dc145fe-2492-49f0-b56b-65dc87f7b0d2 service nova] Acquiring lock "refresh_cache-548df581-073b-41d4-bcbe-df7342a2beca" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2011.524085] env[62684]: DEBUG oslo_concurrency.lockutils [req-efa67800-f8a5-4c71-a66c-93dfe4d9571d req-3dc145fe-2492-49f0-b56b-65dc87f7b0d2 service nova] Acquired lock "refresh_cache-548df581-073b-41d4-bcbe-df7342a2beca" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2011.524243] env[62684]: DEBUG nova.network.neutron [req-efa67800-f8a5-4c71-a66c-93dfe4d9571d req-3dc145fe-2492-49f0-b56b-65dc87f7b0d2 service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Refreshing network info cache for port f3a330d0-ed4d-48e9-956c-1f6ee2137ea2 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2011.648143] env[62684]: DEBUG oslo_vmware.api [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52618d11-c67d-5ced-26f8-ec87904b0c05, 'name': SearchDatastore_Task, 'duration_secs': 0.079627} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.648507] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2011.648733] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2011.648976] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2011.649181] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2011.649333] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Creating directory with path [datastore2] 
devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2011.649669] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-75554003-bd6a-4872-a81b-c0679f9790c9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.661940] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2011.662205] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2011.663314] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5047a21e-e549-4601-8422-4497338a3116 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.671252] env[62684]: DEBUG oslo_vmware.api [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2011.671252] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c9b07b-008e-9f30-6228-f79c03ad486e" [ 2011.671252] env[62684]: _type = "Task" [ 2011.671252] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.681740] env[62684]: DEBUG oslo_vmware.api [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c9b07b-008e-9f30-6228-f79c03ad486e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.684857] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053056, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.393732} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.685111] env[62684]: INFO nova.virt.vmwareapi.ds_util [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_bb989171-195a-4723-921d-8bb2b49ecd24/OSTACK_IMG_bb989171-195a-4723-921d-8bb2b49ecd24.vmdk to [datastore2] devstack-image-cache_base/2116318e-3b46-4c4b-83f6-3ab3a26c5100/2116318e-3b46-4c4b-83f6-3ab3a26c5100.vmdk. 
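The MoveVirtualDisk_Task, CopyVirtualDisk_Task and CreateVM_Task entries around this point all follow the same pattern: the driver invokes a vCenter task through oslo.vmware, then polls it until it finishes, logging "progress is N%" on each poll and "completed successfully" with a duration once it is done. The sketch below is a minimal, illustrative approximation of that poll loop under stated assumptions; it is not the oslo.vmware implementation, and the callable get_task_info plus its .state/.progress/.error fields are hypothetical stand-ins for the real service calls.

    import time

    POLL_INTERVAL = 0.5  # seconds between polls; the real driver drives this via oslo.service.loopingcall

    def wait_for_task(get_task_info, task_ref, poll_interval=POLL_INTERVAL):
        """Poll a vCenter task until it finishes, mirroring the
        'progress is N%' / 'completed successfully' entries in this log.

        get_task_info is a hypothetical callable returning an object with
        .state ('running', 'success' or 'error'), .progress and .error.
        """
        while True:
            info = get_task_info(task_ref)
            if info.state == "success":
                return info                      # logged as "completed successfully"
            if info.state == "error":
                # the real driver surfaces the task error as an exception
                raise RuntimeError(info.error)
            # still running: report progress and poll again
            print(f"Task {task_ref}: progress is {info.progress}%")
            time.sleep(poll_interval)

Under these assumptions, the repeated "MoveVirtualDisk_Task} progress is 24%/43%/66%/85%" lines above correspond to successive iterations of such a loop against the same task-2053056, with the final poll reporting completion.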
[ 2011.685316] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Cleaning up location [datastore2] OSTACK_IMG_bb989171-195a-4723-921d-8bb2b49ecd24 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2011.685482] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_bb989171-195a-4723-921d-8bb2b49ecd24 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2011.685757] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-75fd4335-b84d-4857-8a8c-aa6fe1578527 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.691674] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2011.691674] env[62684]: value = "task-2053059" [ 2011.691674] env[62684]: _type = "Task" [ 2011.691674] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.700412] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053059, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.889418] env[62684]: DEBUG nova.network.neutron [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Updating instance_info_cache with network_info: [{"id": "39c750a6-1076-4354-bc30-d7f50ca821b5", "address": "fa:16:3e:16:fe:89", "network": {"id": "c328aa25-c979-44ef-9fe3-8d0b5013533f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.34", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "060baef1c5f640fda05fe7b750aa2f0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39c750a6-10", "ovs_interfaceid": "39c750a6-1076-4354-bc30-d7f50ca821b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2011.938153] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.467s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2011.938153] env[62684]: DEBUG nova.compute.manager [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2011.941058] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.065s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2011.941154] env[62684]: DEBUG nova.objects.instance [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Lazy-loading 'resources' on Instance uuid 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2011.948071] env[62684]: DEBUG oslo_concurrency.lockutils [req-c456c71f-97fa-41ab-8efd-1edd8d65cbea req-09d71e3a-7821-4712-9910-ba838b8cfb30 service nova] Releasing lock "refresh_cache-81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2012.184176] env[62684]: DEBUG oslo_vmware.api [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c9b07b-008e-9f30-6228-f79c03ad486e, 'name': SearchDatastore_Task, 'duration_secs': 0.010044} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2012.185027] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8af50eff-233f-4b1f-99af-5653f7c0a12f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.192063] env[62684]: DEBUG oslo_vmware.api [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2012.192063] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52720f46-b35f-0ee4-bd47-c168ef44c307" [ 2012.192063] env[62684]: _type = "Task" [ 2012.192063] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2012.207072] env[62684]: DEBUG oslo_vmware.api [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52720f46-b35f-0ee4-bd47-c168ef44c307, 'name': SearchDatastore_Task, 'duration_secs': 0.008796} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2012.210249] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2012.210611] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 81b7949d-be24-46c9-8dc8-c249b65bb039/81b7949d-be24-46c9-8dc8-c249b65bb039.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2012.211027] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053059, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.036051} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2012.211287] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ec5b6c57-6fd4-4c13-940a-35e3c1020ab5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.214328] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2012.214640] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2116318e-3b46-4c4b-83f6-3ab3a26c5100/2116318e-3b46-4c4b-83f6-3ab3a26c5100.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2012.214973] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2116318e-3b46-4c4b-83f6-3ab3a26c5100/2116318e-3b46-4c4b-83f6-3ab3a26c5100.vmdk to [datastore2] b4cd871a-30ea-4b7a-98ad-00b8676dc2cd/b4cd871a-30ea-4b7a-98ad-00b8676dc2cd.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2012.216045] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-abefa6e4-94b0-42ca-bd9c-18b5458e88e5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.224429] env[62684]: DEBUG oslo_vmware.api [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] 
Waiting for the task: (returnval){ [ 2012.224429] env[62684]: value = "task-2053060" [ 2012.224429] env[62684]: _type = "Task" [ 2012.224429] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2012.226173] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2012.226173] env[62684]: value = "task-2053061" [ 2012.226173] env[62684]: _type = "Task" [ 2012.226173] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2012.238697] env[62684]: DEBUG oslo_vmware.api [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053060, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.242245] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053061, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.392743] env[62684]: DEBUG oslo_concurrency.lockutils [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Releasing lock "refresh_cache-02dc8c41-5092-4f84-9722-37d4df3a459a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2012.393312] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2012.393624] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8cb05d30-25aa-4b64-ab42-1c8fc80805b0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.401590] env[62684]: DEBUG oslo_vmware.api [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 2012.401590] env[62684]: value = "task-2053062" [ 2012.401590] env[62684]: _type = "Task" [ 2012.401590] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2012.411437] env[62684]: DEBUG oslo_vmware.api [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2053062, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.416886] env[62684]: DEBUG nova.network.neutron [req-efa67800-f8a5-4c71-a66c-93dfe4d9571d req-3dc145fe-2492-49f0-b56b-65dc87f7b0d2 service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Updated VIF entry in instance network info cache for port f3a330d0-ed4d-48e9-956c-1f6ee2137ea2. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2012.417305] env[62684]: DEBUG nova.network.neutron [req-efa67800-f8a5-4c71-a66c-93dfe4d9571d req-3dc145fe-2492-49f0-b56b-65dc87f7b0d2 service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Updating instance_info_cache with network_info: [{"id": "f3a330d0-ed4d-48e9-956c-1f6ee2137ea2", "address": "fa:16:3e:ba:1c:e2", "network": {"id": "944b4fdf-4610-4ecc-acbb-a4943f324b5d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1439448096-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c64d2a551d44429ca54f58862c64fe9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3a330d0-ed", "ovs_interfaceid": "f3a330d0-ed4d-48e9-956c-1f6ee2137ea2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2012.444093] env[62684]: DEBUG nova.compute.utils [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2012.445579] env[62684]: DEBUG nova.compute.manager [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2012.445789] env[62684]: DEBUG nova.network.neutron [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2012.502855] env[62684]: DEBUG nova.policy [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b7b99d78251448edaf1d119509f6dedf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'feab568b5c9e41bfa2ca824d44bcc4e7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2012.742646] env[62684]: DEBUG oslo_vmware.api [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053060, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.753195] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053061, 'name': CopyVirtualDisk_Task} progress is 15%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.834480] env[62684]: DEBUG nova.network.neutron [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Successfully created port: c77f6606-e602-4667-992c-c6a0e052e01a {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2012.865188] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187bd852-2d55-460e-98b8-1d6893bda53f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.873797] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bdbcad9-d543-4054-bd61-95f944ea7175 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.913397] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a76bf8e-c5d1-4ca3-a0f1-03555014e9fb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.923513] env[62684]: DEBUG oslo_concurrency.lockutils [req-efa67800-f8a5-4c71-a66c-93dfe4d9571d req-3dc145fe-2492-49f0-b56b-65dc87f7b0d2 service nova] Releasing lock "refresh_cache-548df581-073b-41d4-bcbe-df7342a2beca" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2012.924045] env[62684]: DEBUG oslo_vmware.api [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2053062, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.927754] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0837f9eb-daf5-4364-a910-22eddf1e0c45 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.942421] env[62684]: DEBUG nova.compute.provider_tree [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2012.951648] env[62684]: DEBUG nova.compute.manager [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2013.241223] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053061, 'name': CopyVirtualDisk_Task} progress is 32%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.244249] env[62684]: DEBUG oslo_vmware.api [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053060, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.838926} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2013.244577] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 81b7949d-be24-46c9-8dc8-c249b65bb039/81b7949d-be24-46c9-8dc8-c249b65bb039.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2013.244841] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2013.245168] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-69b9e1b2-28e8-49ef-88ec-7f25ad1c5a72 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.253621] env[62684]: DEBUG oslo_vmware.api [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2013.253621] env[62684]: value = "task-2053063" [ 2013.253621] env[62684]: _type = "Task" [ 2013.253621] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2013.262134] env[62684]: DEBUG oslo_vmware.api [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053063, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.421354] env[62684]: DEBUG oslo_vmware.api [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2053062, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.445848] env[62684]: DEBUG nova.scheduler.client.report [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2013.742339] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053061, 'name': CopyVirtualDisk_Task} progress is 54%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.763964] env[62684]: DEBUG oslo_vmware.api [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053063, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.921964] env[62684]: DEBUG oslo_vmware.api [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2053062, 'name': PowerOffVM_Task, 'duration_secs': 1.05172} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2013.922297] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2013.923014] env[62684]: DEBUG nova.virt.hardware [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:47:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='d1dcf74d-6a75-42cb-977e-e0fc87b2d673',id=39,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1732807329',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2013.923374] env[62684]: DEBUG nova.virt.hardware [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2013.923464] env[62684]: DEBUG nova.virt.hardware [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2013.923593] env[62684]: DEBUG nova.virt.hardware [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2013.923745] env[62684]: DEBUG nova.virt.hardware [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2013.923899] env[62684]: DEBUG nova.virt.hardware [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2013.924129] env[62684]: DEBUG nova.virt.hardware [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2013.924300] env[62684]: DEBUG nova.virt.hardware [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 
tempest-MigrationsAdminTest-991359366-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2013.924476] env[62684]: DEBUG nova.virt.hardware [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2013.924749] env[62684]: DEBUG nova.virt.hardware [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2013.924952] env[62684]: DEBUG nova.virt.hardware [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2013.931072] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0cccbacd-96a4-493d-ae60-6e09d4000487 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.943994] env[62684]: DEBUG nova.compute.manager [req-b2024ae8-cb60-40fd-9e4a-2f5ee15e875f req-72732ee0-6aa9-475b-a5c1-1bed0b85f369 service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Received event network-changed-f3a330d0-ed4d-48e9-956c-1f6ee2137ea2 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2013.944235] env[62684]: DEBUG nova.compute.manager [req-b2024ae8-cb60-40fd-9e4a-2f5ee15e875f req-72732ee0-6aa9-475b-a5c1-1bed0b85f369 service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Refreshing instance network info cache due to event network-changed-f3a330d0-ed4d-48e9-956c-1f6ee2137ea2. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2013.945133] env[62684]: DEBUG oslo_concurrency.lockutils [req-b2024ae8-cb60-40fd-9e4a-2f5ee15e875f req-72732ee0-6aa9-475b-a5c1-1bed0b85f369 service nova] Acquiring lock "refresh_cache-548df581-073b-41d4-bcbe-df7342a2beca" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2013.945133] env[62684]: DEBUG oslo_concurrency.lockutils [req-b2024ae8-cb60-40fd-9e4a-2f5ee15e875f req-72732ee0-6aa9-475b-a5c1-1bed0b85f369 service nova] Acquired lock "refresh_cache-548df581-073b-41d4-bcbe-df7342a2beca" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2013.945133] env[62684]: DEBUG nova.network.neutron [req-b2024ae8-cb60-40fd-9e4a-2f5ee15e875f req-72732ee0-6aa9-475b-a5c1-1bed0b85f369 service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Refreshing network info cache for port f3a330d0-ed4d-48e9-956c-1f6ee2137ea2 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2013.952939] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.012s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2013.955173] env[62684]: DEBUG oslo_vmware.api [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 2013.955173] env[62684]: value = "task-2053064" [ 2013.955173] env[62684]: _type = "Task" [ 2013.955173] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2013.957127] env[62684]: DEBUG oslo_concurrency.lockutils [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.953s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2013.957127] env[62684]: DEBUG nova.objects.instance [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lazy-loading 'resources' on Instance uuid 4a15d298-115f-4132-8be0-00e623fa21d8 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2013.963518] env[62684]: DEBUG nova.compute.manager [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2013.974261] env[62684]: DEBUG oslo_vmware.api [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2053064, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.982552] env[62684]: INFO nova.scheduler.client.report [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Deleted allocations for instance 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21 [ 2014.000484] env[62684]: DEBUG nova.virt.hardware [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2014.000752] env[62684]: DEBUG nova.virt.hardware [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2014.000911] env[62684]: DEBUG nova.virt.hardware [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2014.002393] env[62684]: DEBUG nova.virt.hardware [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2014.002393] env[62684]: DEBUG nova.virt.hardware [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2014.002393] env[62684]: DEBUG nova.virt.hardware [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2014.002393] env[62684]: DEBUG nova.virt.hardware [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2014.002592] env[62684]: DEBUG nova.virt.hardware [None 
req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2014.003533] env[62684]: DEBUG nova.virt.hardware [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2014.003533] env[62684]: DEBUG nova.virt.hardware [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2014.003533] env[62684]: DEBUG nova.virt.hardware [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2014.004522] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee0325a9-bc90-4b43-9658-ee1d23fb0616 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.014993] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9718e05f-a7d3-4a95-98b6-1c219e3c25a7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.243330] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053061, 'name': CopyVirtualDisk_Task} progress is 74%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.264630] env[62684]: DEBUG oslo_vmware.api [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053063, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.658619} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2014.265944] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2014.266137] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa66cb97-4800-4c43-b9ff-c47b71de946d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.293407] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] 81b7949d-be24-46c9-8dc8-c249b65bb039/81b7949d-be24-46c9-8dc8-c249b65bb039.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2014.293795] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-04cf4a6c-0a7a-46ad-b306-39e49c9db2f4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.316615] env[62684]: DEBUG oslo_vmware.api [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2014.316615] env[62684]: value = "task-2053065" [ 2014.316615] env[62684]: _type = "Task" [ 2014.316615] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2014.328281] env[62684]: DEBUG oslo_vmware.api [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053065, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.447170] env[62684]: DEBUG nova.network.neutron [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Successfully updated port: c77f6606-e602-4667-992c-c6a0e052e01a {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2014.474866] env[62684]: DEBUG oslo_vmware.api [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2053064, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.498666] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7b4899a4-83c3-428d-9229-bd10b1caf5d5 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Lock "2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.621s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2014.508669] env[62684]: DEBUG nova.compute.manager [req-02da4ee7-b8b2-4649-ab8c-1e4f822ffd29 req-2024630a-cdf5-4a05-8ef8-8843e8bf20a9 service nova] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Received event network-vif-plugged-c77f6606-e602-4667-992c-c6a0e052e01a {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2014.508820] env[62684]: DEBUG oslo_concurrency.lockutils [req-02da4ee7-b8b2-4649-ab8c-1e4f822ffd29 req-2024630a-cdf5-4a05-8ef8-8843e8bf20a9 service nova] Acquiring lock "7b29207a-7fa8-4374-819e-c046b2014969-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2014.509031] env[62684]: DEBUG oslo_concurrency.lockutils [req-02da4ee7-b8b2-4649-ab8c-1e4f822ffd29 req-2024630a-cdf5-4a05-8ef8-8843e8bf20a9 service nova] Lock "7b29207a-7fa8-4374-819e-c046b2014969-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2014.509240] env[62684]: DEBUG oslo_concurrency.lockutils [req-02da4ee7-b8b2-4649-ab8c-1e4f822ffd29 req-2024630a-cdf5-4a05-8ef8-8843e8bf20a9 service nova] Lock "7b29207a-7fa8-4374-819e-c046b2014969-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2014.509590] env[62684]: DEBUG nova.compute.manager [req-02da4ee7-b8b2-4649-ab8c-1e4f822ffd29 req-2024630a-cdf5-4a05-8ef8-8843e8bf20a9 service nova] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] No waiting events found dispatching network-vif-plugged-c77f6606-e602-4667-992c-c6a0e052e01a {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2014.509590] env[62684]: WARNING nova.compute.manager [req-02da4ee7-b8b2-4649-ab8c-1e4f822ffd29 req-2024630a-cdf5-4a05-8ef8-8843e8bf20a9 service nova] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Received unexpected event network-vif-plugged-c77f6606-e602-4667-992c-c6a0e052e01a for instance with vm_state building and task_state spawning. [ 2014.678985] env[62684]: DEBUG nova.network.neutron [req-b2024ae8-cb60-40fd-9e4a-2f5ee15e875f req-72732ee0-6aa9-475b-a5c1-1bed0b85f369 service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Updated VIF entry in instance network info cache for port f3a330d0-ed4d-48e9-956c-1f6ee2137ea2. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2014.679426] env[62684]: DEBUG nova.network.neutron [req-b2024ae8-cb60-40fd-9e4a-2f5ee15e875f req-72732ee0-6aa9-475b-a5c1-1bed0b85f369 service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Updating instance_info_cache with network_info: [{"id": "f3a330d0-ed4d-48e9-956c-1f6ee2137ea2", "address": "fa:16:3e:ba:1c:e2", "network": {"id": "944b4fdf-4610-4ecc-acbb-a4943f324b5d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1439448096-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c64d2a551d44429ca54f58862c64fe9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3a330d0-ed", "ovs_interfaceid": "f3a330d0-ed4d-48e9-956c-1f6ee2137ea2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2014.747751] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053061, 'name': CopyVirtualDisk_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.827468] env[62684]: DEBUG oslo_vmware.api [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053065, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.841546] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71a4594d-aa1f-4a7f-8e24-aa31afec04e8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.848887] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b6a4b8e-9091-4c73-ab59-21a4352ab3f4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.879229] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27216210-aef1-4595-a7b2-33d574690191 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.886517] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a675780d-c755-4e0c-bb11-ab8fb6133942 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.899809] env[62684]: DEBUG nova.compute.provider_tree [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2014.953812] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Acquiring lock "refresh_cache-7b29207a-7fa8-4374-819e-c046b2014969" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2014.953812] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Acquired lock "refresh_cache-7b29207a-7fa8-4374-819e-c046b2014969" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2014.953812] env[62684]: DEBUG nova.network.neutron [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2014.969710] env[62684]: DEBUG oslo_vmware.api [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2053064, 'name': ReconfigVM_Task, 'duration_secs': 0.940863} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2014.970630] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68785da2-ff09-4997-8c69-43558609e724 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.988954] env[62684]: DEBUG nova.virt.hardware [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:47:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='d1dcf74d-6a75-42cb-977e-e0fc87b2d673',id=39,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1732807329',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2014.989234] env[62684]: DEBUG nova.virt.hardware [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2014.989404] env[62684]: DEBUG nova.virt.hardware [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2014.989593] env[62684]: DEBUG nova.virt.hardware [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2014.989745] env[62684]: DEBUG nova.virt.hardware [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2014.989909] env[62684]: DEBUG nova.virt.hardware [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2014.990389] env[62684]: DEBUG nova.virt.hardware [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2014.990389] env[62684]: DEBUG nova.virt.hardware [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2014.990585] env[62684]: DEBUG nova.virt.hardware [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2014.990712] env[62684]: DEBUG nova.virt.hardware [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2014.990953] env[62684]: DEBUG nova.virt.hardware [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2014.993984] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-160cc614-ccd8-4d87-9ce3-50fcd818a900 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.997396] env[62684]: DEBUG oslo_vmware.api [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 2014.997396] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521d4d11-fdc0-9289-98d1-06ac68e85d98" [ 2014.997396] env[62684]: _type = "Task" [ 2014.997396] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.007977] env[62684]: DEBUG oslo_vmware.api [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521d4d11-fdc0-9289-98d1-06ac68e85d98, 'name': SearchDatastore_Task, 'duration_secs': 0.006944} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.013710] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Reconfiguring VM instance instance-00000030 to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2015.013974] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-842450bc-fd1f-4c7d-a570-49087004f6ab {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.034808] env[62684]: DEBUG oslo_vmware.api [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 2015.034808] env[62684]: value = "task-2053066" [ 2015.034808] env[62684]: _type = "Task" [ 2015.034808] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.043172] env[62684]: DEBUG oslo_vmware.api [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2053066, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.182360] env[62684]: DEBUG oslo_concurrency.lockutils [req-b2024ae8-cb60-40fd-9e4a-2f5ee15e875f req-72732ee0-6aa9-475b-a5c1-1bed0b85f369 service nova] Releasing lock "refresh_cache-548df581-073b-41d4-bcbe-df7342a2beca" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2015.182629] env[62684]: DEBUG nova.compute.manager [req-b2024ae8-cb60-40fd-9e4a-2f5ee15e875f req-72732ee0-6aa9-475b-a5c1-1bed0b85f369 service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Received event network-changed-f3a330d0-ed4d-48e9-956c-1f6ee2137ea2 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2015.182882] env[62684]: DEBUG nova.compute.manager [req-b2024ae8-cb60-40fd-9e4a-2f5ee15e875f req-72732ee0-6aa9-475b-a5c1-1bed0b85f369 service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Refreshing instance network info cache due to event network-changed-f3a330d0-ed4d-48e9-956c-1f6ee2137ea2. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2015.183092] env[62684]: DEBUG oslo_concurrency.lockutils [req-b2024ae8-cb60-40fd-9e4a-2f5ee15e875f req-72732ee0-6aa9-475b-a5c1-1bed0b85f369 service nova] Acquiring lock "refresh_cache-548df581-073b-41d4-bcbe-df7342a2beca" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2015.183267] env[62684]: DEBUG oslo_concurrency.lockutils [req-b2024ae8-cb60-40fd-9e4a-2f5ee15e875f req-72732ee0-6aa9-475b-a5c1-1bed0b85f369 service nova] Acquired lock "refresh_cache-548df581-073b-41d4-bcbe-df7342a2beca" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2015.183438] env[62684]: DEBUG nova.network.neutron [req-b2024ae8-cb60-40fd-9e4a-2f5ee15e875f req-72732ee0-6aa9-475b-a5c1-1bed0b85f369 service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Refreshing network info cache for port f3a330d0-ed4d-48e9-956c-1f6ee2137ea2 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2015.243990] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053061, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.611787} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.246612] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2116318e-3b46-4c4b-83f6-3ab3a26c5100/2116318e-3b46-4c4b-83f6-3ab3a26c5100.vmdk to [datastore2] b4cd871a-30ea-4b7a-98ad-00b8676dc2cd/b4cd871a-30ea-4b7a-98ad-00b8676dc2cd.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2015.246612] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef0d6a11-7a09-4250-89eb-ade5bb278d15 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.267600] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Reconfiguring VM instance instance-00000025 to attach disk [datastore2] b4cd871a-30ea-4b7a-98ad-00b8676dc2cd/b4cd871a-30ea-4b7a-98ad-00b8676dc2cd.vmdk or device None with type streamOptimized {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2015.268312] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0fe08c70-8a21-40e2-ad30-21b35f520061 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.291391] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2015.291391] env[62684]: value = "task-2053067" [ 2015.291391] env[62684]: _type = "Task" [ 2015.291391] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.303723] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053067, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.330387] env[62684]: DEBUG oslo_vmware.api [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053065, 'name': ReconfigVM_Task, 'duration_secs': 0.705473} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.330387] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Reconfigured VM instance instance-00000042 to attach disk [datastore2] 81b7949d-be24-46c9-8dc8-c249b65bb039/81b7949d-be24-46c9-8dc8-c249b65bb039.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2015.330387] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d414dc14-e6a9-4350-b400-f509f126c7d6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.334683] env[62684]: DEBUG oslo_vmware.api [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2015.334683] env[62684]: value = "task-2053068" [ 2015.334683] env[62684]: _type = "Task" [ 2015.334683] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.353270] env[62684]: DEBUG oslo_vmware.api [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053068, 'name': Rename_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.403810] env[62684]: DEBUG nova.scheduler.client.report [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2015.492699] env[62684]: DEBUG nova.network.neutron [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2015.545461] env[62684]: DEBUG oslo_vmware.api [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2053066, 'name': ReconfigVM_Task, 'duration_secs': 0.163351} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.545858] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Reconfigured VM instance instance-00000030 to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2015.546777] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f707d5a-8c5f-494d-9cd2-e9995afc49db {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.572782] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Reconfiguring VM instance instance-00000030 to attach disk [datastore2] 02dc8c41-5092-4f84-9722-37d4df3a459a/02dc8c41-5092-4f84-9722-37d4df3a459a.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2015.573162] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef5027b5-545d-428a-b055-c2944326cb69 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.593652] env[62684]: DEBUG oslo_vmware.api [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 2015.593652] env[62684]: value = "task-2053069" [ 2015.593652] env[62684]: _type = "Task" [ 2015.593652] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.601740] env[62684]: DEBUG oslo_vmware.api [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2053069, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.721045] env[62684]: DEBUG nova.network.neutron [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Updating instance_info_cache with network_info: [{"id": "c77f6606-e602-4667-992c-c6a0e052e01a", "address": "fa:16:3e:43:c7:f0", "network": {"id": "899020b7-a29e-4a35-bf3c-f9aebda1208d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1035902693-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "feab568b5c9e41bfa2ca824d44bcc4e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06eaa4c9-dbc2-4d38-a844-7bf76e7b5a64", "external-id": "nsx-vlan-transportzone-804", "segmentation_id": 804, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc77f6606-e6", "ovs_interfaceid": "c77f6606-e602-4667-992c-c6a0e052e01a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2015.801994] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053067, 'name': ReconfigVM_Task, 'duration_secs': 0.276286} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.802866] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Reconfigured VM instance instance-00000025 to attach disk [datastore2] b4cd871a-30ea-4b7a-98ad-00b8676dc2cd/b4cd871a-30ea-4b7a-98ad-00b8676dc2cd.vmdk or device None with type streamOptimized {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2015.804520] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'disk_bus': None, 'boot_index': 0, 'device_name': '/dev/sda', 'device_type': 'disk', 'encryption_secret_uuid': None, 'encryption_options': None, 'encrypted': False, 'guest_format': None, 'size': 0, 'encryption_format': None, 'image_id': '3931321c-cb4c-4b87-8d3a-50e05ea01db2'}], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': False, 'disk_bus': None, 'boot_index': None, 'device_type': None, 'attachment_id': 'c29603be-b550-4dbd-9f0c-8437c64fb394', 'guest_format': None, 'mount_device': '/dev/sdb', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421287', 'volume_id': '554d5299-0a48-44f8-bb8e-9328f519c7ee', 'name': 'volume-554d5299-0a48-44f8-bb8e-9328f519c7ee', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'b4cd871a-30ea-4b7a-98ad-00b8676dc2cd', 'attached_at': '', 'detached_at': '', 'volume_id': '554d5299-0a48-44f8-bb8e-9328f519c7ee', 'serial': '554d5299-0a48-44f8-bb8e-9328f519c7ee'}, 'volume_type': None}], 'swap': None} {{(pid=62684) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 2015.804843] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Volume attach. 
Driver type: vmdk {{(pid=62684) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2015.805137] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421287', 'volume_id': '554d5299-0a48-44f8-bb8e-9328f519c7ee', 'name': 'volume-554d5299-0a48-44f8-bb8e-9328f519c7ee', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'b4cd871a-30ea-4b7a-98ad-00b8676dc2cd', 'attached_at': '', 'detached_at': '', 'volume_id': '554d5299-0a48-44f8-bb8e-9328f519c7ee', 'serial': '554d5299-0a48-44f8-bb8e-9328f519c7ee'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2015.806038] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d70bff4-9c8a-4ab9-be75-9e8e8f790b1c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.832785] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06815f1c-a05a-4d8e-acdd-33da25281be3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.844818] env[62684]: DEBUG oslo_vmware.api [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053068, 'name': Rename_Task, 'duration_secs': 0.137581} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.857871] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2015.866820] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Reconfiguring VM instance instance-00000025 to attach disk [datastore2] volume-554d5299-0a48-44f8-bb8e-9328f519c7ee/volume-554d5299-0a48-44f8-bb8e-9328f519c7ee.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2015.869718] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0c8c52a1-495e-44e9-8694-fa740674f17d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.871816] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85fe5eec-1d15-4899-9e6e-c42fb9ac051b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.890203] env[62684]: DEBUG oslo_vmware.api [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2015.890203] env[62684]: value = "task-2053070" [ 2015.890203] env[62684]: _type = "Task" [ 2015.890203] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.891492] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2015.891492] env[62684]: value = "task-2053071" [ 2015.891492] env[62684]: _type = "Task" [ 2015.891492] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.901408] env[62684]: DEBUG oslo_vmware.api [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053070, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.904346] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053071, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.908529] env[62684]: DEBUG oslo_concurrency.lockutils [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.952s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2015.910609] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.714s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2015.912599] env[62684]: INFO nova.compute.claims [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2015.933023] env[62684]: INFO nova.scheduler.client.report [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Deleted allocations for instance 4a15d298-115f-4132-8be0-00e623fa21d8 [ 2015.981969] env[62684]: DEBUG nova.compute.manager [req-ad11dc20-163d-4257-b97b-abea6ef529de req-7ef8fa37-e264-4041-aebb-01d99b433a4a service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Received event network-changed-f3a330d0-ed4d-48e9-956c-1f6ee2137ea2 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2015.982190] env[62684]: DEBUG nova.compute.manager [req-ad11dc20-163d-4257-b97b-abea6ef529de req-7ef8fa37-e264-4041-aebb-01d99b433a4a service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Refreshing instance network info cache due to event network-changed-f3a330d0-ed4d-48e9-956c-1f6ee2137ea2. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2015.983188] env[62684]: DEBUG oslo_concurrency.lockutils [req-ad11dc20-163d-4257-b97b-abea6ef529de req-7ef8fa37-e264-4041-aebb-01d99b433a4a service nova] Acquiring lock "refresh_cache-548df581-073b-41d4-bcbe-df7342a2beca" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2016.088231] env[62684]: DEBUG nova.network.neutron [req-b2024ae8-cb60-40fd-9e4a-2f5ee15e875f req-72732ee0-6aa9-475b-a5c1-1bed0b85f369 service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Updated VIF entry in instance network info cache for port f3a330d0-ed4d-48e9-956c-1f6ee2137ea2. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2016.088582] env[62684]: DEBUG nova.network.neutron [req-b2024ae8-cb60-40fd-9e4a-2f5ee15e875f req-72732ee0-6aa9-475b-a5c1-1bed0b85f369 service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Updating instance_info_cache with network_info: [{"id": "f3a330d0-ed4d-48e9-956c-1f6ee2137ea2", "address": "fa:16:3e:ba:1c:e2", "network": {"id": "944b4fdf-4610-4ecc-acbb-a4943f324b5d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1439448096-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c64d2a551d44429ca54f58862c64fe9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3a330d0-ed", "ovs_interfaceid": "f3a330d0-ed4d-48e9-956c-1f6ee2137ea2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2016.105153] env[62684]: DEBUG oslo_vmware.api [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2053069, 'name': ReconfigVM_Task, 'duration_secs': 0.250525} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2016.105805] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Reconfigured VM instance instance-00000030 to attach disk [datastore2] 02dc8c41-5092-4f84-9722-37d4df3a459a/02dc8c41-5092-4f84-9722-37d4df3a459a.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2016.109866] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-937802e0-2b51-4c1a-82fd-af40c4ef02cc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.109866] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Acquiring lock "548df581-073b-41d4-bcbe-df7342a2beca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2016.109866] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Lock "548df581-073b-41d4-bcbe-df7342a2beca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2016.110174] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Acquiring lock "548df581-073b-41d4-bcbe-df7342a2beca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2016.110368] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Lock "548df581-073b-41d4-bcbe-df7342a2beca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2016.110538] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Lock "548df581-073b-41d4-bcbe-df7342a2beca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2016.112578] env[62684]: INFO nova.compute.manager [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Terminating 
instance [ 2016.114764] env[62684]: DEBUG nova.compute.manager [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2016.115034] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2016.116711] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a858b16-9189-44b3-994d-cde3d5cb1ee7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.135073] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64a937e0-68e1-4c25-b31d-78e8fc9e89e9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.139776] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2016.140386] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b836a9f1-63d1-4075-b993-e16afcd87b77 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.159174] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85beac1d-449a-43d7-b792-3f494bc6d1a9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.162967] env[62684]: DEBUG oslo_vmware.api [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Waiting for the task: (returnval){ [ 2016.162967] env[62684]: value = "task-2053072" [ 2016.162967] env[62684]: _type = "Task" [ 2016.162967] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2016.180950] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d0abd60-453d-49bf-85d6-92d0883ce964 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.186834] env[62684]: DEBUG oslo_vmware.api [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': task-2053072, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.191120] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2016.191407] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c12ffdac-3f73-4347-ba3e-2a703e392baa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.197566] env[62684]: DEBUG oslo_vmware.api [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 2016.197566] env[62684]: value = "task-2053073" [ 2016.197566] env[62684]: _type = "Task" [ 2016.197566] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2016.205570] env[62684]: DEBUG oslo_vmware.api [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2053073, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.224639] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Releasing lock "refresh_cache-7b29207a-7fa8-4374-819e-c046b2014969" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2016.224993] env[62684]: DEBUG nova.compute.manager [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Instance network_info: |[{"id": "c77f6606-e602-4667-992c-c6a0e052e01a", "address": "fa:16:3e:43:c7:f0", "network": {"id": "899020b7-a29e-4a35-bf3c-f9aebda1208d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1035902693-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "feab568b5c9e41bfa2ca824d44bcc4e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06eaa4c9-dbc2-4d38-a844-7bf76e7b5a64", "external-id": "nsx-vlan-transportzone-804", "segmentation_id": 804, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc77f6606-e6", "ovs_interfaceid": "c77f6606-e602-4667-992c-c6a0e052e01a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2016.225673] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None 
req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:43:c7:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '06eaa4c9-dbc2-4d38-a844-7bf76e7b5a64', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c77f6606-e602-4667-992c-c6a0e052e01a', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2016.238797] env[62684]: DEBUG oslo.service.loopingcall [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2016.239310] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2016.239705] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3834e880-db5f-4a79-8489-fa5bfb619c7e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.264128] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2016.264128] env[62684]: value = "task-2053074" [ 2016.264128] env[62684]: _type = "Task" [ 2016.264128] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2016.272719] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053074, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.404784] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053071, 'name': ReconfigVM_Task, 'duration_secs': 0.288447} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2016.407397] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Reconfigured VM instance instance-00000025 to attach disk [datastore2] volume-554d5299-0a48-44f8-bb8e-9328f519c7ee/volume-554d5299-0a48-44f8-bb8e-9328f519c7ee.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2016.412627] env[62684]: DEBUG oslo_vmware.api [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053070, 'name': PowerOnVM_Task, 'duration_secs': 0.453677} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2016.412627] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc6f5a5f-cd1b-40c3-b19f-c6e087b107f8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.423116] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2016.423116] env[62684]: INFO nova.compute.manager [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Took 8.00 seconds to spawn the instance on the hypervisor. [ 2016.424291] env[62684]: DEBUG nova.compute.manager [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2016.427718] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee155296-07e3-4758-ab59-c7d5f79cc0c2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.447694] env[62684]: DEBUG oslo_concurrency.lockutils [None req-21b37dfa-cf12-48d6-942e-b36d027ce5c8 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "4a15d298-115f-4132-8be0-00e623fa21d8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.445s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2016.454414] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2016.454414] env[62684]: value = "task-2053075" [ 2016.454414] env[62684]: _type = "Task" [ 2016.454414] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2016.463211] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053075, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.591927] env[62684]: DEBUG oslo_concurrency.lockutils [req-b2024ae8-cb60-40fd-9e4a-2f5ee15e875f req-72732ee0-6aa9-475b-a5c1-1bed0b85f369 service nova] Releasing lock "refresh_cache-548df581-073b-41d4-bcbe-df7342a2beca" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2016.593090] env[62684]: DEBUG oslo_concurrency.lockutils [req-ad11dc20-163d-4257-b97b-abea6ef529de req-7ef8fa37-e264-4041-aebb-01d99b433a4a service nova] Acquired lock "refresh_cache-548df581-073b-41d4-bcbe-df7342a2beca" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2016.593090] env[62684]: DEBUG nova.network.neutron [req-ad11dc20-163d-4257-b97b-abea6ef529de req-7ef8fa37-e264-4041-aebb-01d99b433a4a service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Refreshing network info cache for port f3a330d0-ed4d-48e9-956c-1f6ee2137ea2 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2016.671905] env[62684]: DEBUG oslo_vmware.api [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': task-2053072, 'name': PowerOffVM_Task, 'duration_secs': 0.245978} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2016.672315] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2016.672479] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2016.672769] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5fbab61a-0233-4d94-a22a-b3e68f3540f4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.700205] env[62684]: DEBUG nova.compute.manager [req-a288d7d8-d422-46df-98d4-4b44b618d2a7 req-e2dc88af-1fa5-431e-80de-a12c6df4d905 service nova] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Received event network-changed-c77f6606-e602-4667-992c-c6a0e052e01a {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2016.700294] env[62684]: DEBUG nova.compute.manager [req-a288d7d8-d422-46df-98d4-4b44b618d2a7 req-e2dc88af-1fa5-431e-80de-a12c6df4d905 service nova] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Refreshing instance network info cache due to event network-changed-c77f6606-e602-4667-992c-c6a0e052e01a. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2016.700916] env[62684]: DEBUG oslo_concurrency.lockutils [req-a288d7d8-d422-46df-98d4-4b44b618d2a7 req-e2dc88af-1fa5-431e-80de-a12c6df4d905 service nova] Acquiring lock "refresh_cache-7b29207a-7fa8-4374-819e-c046b2014969" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2016.700916] env[62684]: DEBUG oslo_concurrency.lockutils [req-a288d7d8-d422-46df-98d4-4b44b618d2a7 req-e2dc88af-1fa5-431e-80de-a12c6df4d905 service nova] Acquired lock "refresh_cache-7b29207a-7fa8-4374-819e-c046b2014969" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2016.700916] env[62684]: DEBUG nova.network.neutron [req-a288d7d8-d422-46df-98d4-4b44b618d2a7 req-e2dc88af-1fa5-431e-80de-a12c6df4d905 service nova] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Refreshing network info cache for port c77f6606-e602-4667-992c-c6a0e052e01a {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2016.713792] env[62684]: DEBUG oslo_vmware.api [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2053073, 'name': PowerOnVM_Task, 'duration_secs': 0.446242} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2016.714470] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2016.777618] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053074, 'name': CreateVM_Task, 'duration_secs': 0.386045} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2016.777618] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2016.777618] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2016.777618] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2016.777618] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2016.777618] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42376428-f8ca-4d07-b7a9-f46471c4d75b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.780590] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Waiting for the task: (returnval){ [ 2016.780590] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52dd3de7-4081-1a39-1932-eba0b5da72a6" [ 2016.780590] env[62684]: _type = "Task" [ 2016.780590] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2016.789119] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52dd3de7-4081-1a39-1932-eba0b5da72a6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.827726] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2016.827967] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2016.828180] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Deleting the datastore file [datastore1] 548df581-073b-41d4-bcbe-df7342a2beca {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2016.828548] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-73a0eca7-6ae5-4aad-a346-34446d9da8b3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.837421] env[62684]: DEBUG oslo_vmware.api [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Waiting for the task: (returnval){ [ 2016.837421] env[62684]: value = "task-2053077" [ 2016.837421] env[62684]: _type = "Task" [ 2016.837421] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2016.846867] env[62684]: DEBUG oslo_vmware.api [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': task-2053077, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.939913] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "06751c34-0724-44ba-a263-ad27fcf2920f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2016.940133] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "06751c34-0724-44ba-a263-ad27fcf2920f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2016.940340] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "06751c34-0724-44ba-a263-ad27fcf2920f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2016.940527] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "06751c34-0724-44ba-a263-ad27fcf2920f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2016.940702] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "06751c34-0724-44ba-a263-ad27fcf2920f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2016.942727] env[62684]: INFO nova.compute.manager [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Terminating instance [ 2016.944513] env[62684]: DEBUG nova.compute.manager [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2016.944720] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2016.945580] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6656202e-1677-4170-9a6a-d39722bd7826 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.957909] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2016.962955] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f18e862a-ce1b-439f-8b59-87fe7ab025b1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.964831] env[62684]: INFO nova.compute.manager [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Took 44.04 seconds to build instance. [ 2016.975992] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053075, 'name': ReconfigVM_Task, 'duration_secs': 0.205101} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2016.976341] env[62684]: DEBUG oslo_vmware.api [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 2016.976341] env[62684]: value = "task-2053078" [ 2016.976341] env[62684]: _type = "Task" [ 2016.976341] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2016.980228] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421287', 'volume_id': '554d5299-0a48-44f8-bb8e-9328f519c7ee', 'name': 'volume-554d5299-0a48-44f8-bb8e-9328f519c7ee', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'b4cd871a-30ea-4b7a-98ad-00b8676dc2cd', 'attached_at': '', 'detached_at': '', 'volume_id': '554d5299-0a48-44f8-bb8e-9328f519c7ee', 'serial': '554d5299-0a48-44f8-bb8e-9328f519c7ee'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2016.981677] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ada741c6-db40-4a45-a914-b8d45abd5ec5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.996736] env[62684]: DEBUG oslo_vmware.api [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2053078, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.997078] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2016.997078] env[62684]: value = "task-2053079" [ 2016.997078] env[62684]: _type = "Task" [ 2016.997078] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.008011] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053079, 'name': Rename_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.292528] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52dd3de7-4081-1a39-1932-eba0b5da72a6, 'name': SearchDatastore_Task, 'duration_secs': 0.024201} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2017.297267] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2017.297525] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2017.297795] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2017.297950] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2017.298152] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2017.300996] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-65c6a00a-d21a-4503-9124-13cc00586b5c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.318172] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2017.318387] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2017.321293] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ba777ea-8286-4d0e-8617-6e7a0569e33a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.327347] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Waiting for the task: (returnval){ [ 2017.327347] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b53bfb-12fa-550e-acda-38a5caa5f7c8" [ 2017.327347] env[62684]: _type = "Task" [ 2017.327347] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.328931] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce43ef6e-fb33-4755-a423-34aa8810e73e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.344804] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c06331d6-4805-4b68-a6b7-ebfa39df0bef {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.348108] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b53bfb-12fa-550e-acda-38a5caa5f7c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.356045] env[62684]: DEBUG oslo_vmware.api [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Task: {'id': task-2053077, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.442103} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2017.385061] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2017.386270] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2017.386270] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2017.386270] env[62684]: INFO nova.compute.manager [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Took 1.27 seconds to destroy the instance on the hypervisor. [ 2017.387068] env[62684]: DEBUG oslo.service.loopingcall [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2017.393077] env[62684]: DEBUG nova.compute.manager [-] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2017.393223] env[62684]: DEBUG nova.network.neutron [-] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2017.396528] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28df82d6-4d99-4620-84d6-81beaec85bc7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.404599] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a105f27-3cce-4e45-ba79-6916b19240ce {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.420360] env[62684]: DEBUG nova.compute.provider_tree [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2017.471883] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b7192c60-d08d-4c8e-ba7e-c543ed6a38f5 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "81b7949d-be24-46c9-8dc8-c249b65bb039" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.557s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2017.485528] env[62684]: DEBUG nova.network.neutron [req-a288d7d8-d422-46df-98d4-4b44b618d2a7 req-e2dc88af-1fa5-431e-80de-a12c6df4d905 service nova] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Updated VIF entry in instance network info cache for port c77f6606-e602-4667-992c-c6a0e052e01a. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2017.485932] env[62684]: DEBUG nova.network.neutron [req-a288d7d8-d422-46df-98d4-4b44b618d2a7 req-e2dc88af-1fa5-431e-80de-a12c6df4d905 service nova] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Updating instance_info_cache with network_info: [{"id": "c77f6606-e602-4667-992c-c6a0e052e01a", "address": "fa:16:3e:43:c7:f0", "network": {"id": "899020b7-a29e-4a35-bf3c-f9aebda1208d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1035902693-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "feab568b5c9e41bfa2ca824d44bcc4e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06eaa4c9-dbc2-4d38-a844-7bf76e7b5a64", "external-id": "nsx-vlan-transportzone-804", "segmentation_id": 804, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc77f6606-e6", "ovs_interfaceid": "c77f6606-e602-4667-992c-c6a0e052e01a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2017.492871] env[62684]: DEBUG oslo_vmware.api [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2053078, 'name': PowerOffVM_Task, 'duration_secs': 0.235228} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2017.493692] env[62684]: DEBUG nova.network.neutron [req-ad11dc20-163d-4257-b97b-abea6ef529de req-7ef8fa37-e264-4041-aebb-01d99b433a4a service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Updated VIF entry in instance network info cache for port f3a330d0-ed4d-48e9-956c-1f6ee2137ea2. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2017.494036] env[62684]: DEBUG nova.network.neutron [req-ad11dc20-163d-4257-b97b-abea6ef529de req-7ef8fa37-e264-4041-aebb-01d99b433a4a service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Updating instance_info_cache with network_info: [{"id": "f3a330d0-ed4d-48e9-956c-1f6ee2137ea2", "address": "fa:16:3e:ba:1c:e2", "network": {"id": "944b4fdf-4610-4ecc-acbb-a4943f324b5d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1439448096-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c64d2a551d44429ca54f58862c64fe9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3a330d0-ed", "ovs_interfaceid": "f3a330d0-ed4d-48e9-956c-1f6ee2137ea2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2017.495172] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2017.495367] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2017.498624] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-77c6cd07-6eb8-4599-a532-7f95c5c4377d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.509808] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053079, 'name': Rename_Task, 'duration_secs': 0.193404} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2017.510104] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2017.510365] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7616806b-796b-43ee-a6c1-7795eb42f8e0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.517158] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2017.517158] env[62684]: value = "task-2053081" [ 2017.517158] env[62684]: _type = "Task" [ 2017.517158] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.525597] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053081, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.732665] env[62684]: INFO nova.compute.manager [None req-de1887fd-7a10-4927-ba53-6ff18f85f5c1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Updating instance to original state: 'active' [ 2017.839373] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b53bfb-12fa-550e-acda-38a5caa5f7c8, 'name': SearchDatastore_Task, 'duration_secs': 0.029182} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2017.840297] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb420937-f1e8-49f8-ad35-d566b0e4b207 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.845776] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Waiting for the task: (returnval){ [ 2017.845776] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527465ca-3251-e152-cc06-73a8e6037880" [ 2017.845776] env[62684]: _type = "Task" [ 2017.845776] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.858942] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527465ca-3251-e152-cc06-73a8e6037880, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.923883] env[62684]: DEBUG nova.scheduler.client.report [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2017.989645] env[62684]: DEBUG oslo_concurrency.lockutils [req-a288d7d8-d422-46df-98d4-4b44b618d2a7 req-e2dc88af-1fa5-431e-80de-a12c6df4d905 service nova] Releasing lock "refresh_cache-7b29207a-7fa8-4374-819e-c046b2014969" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2017.999512] env[62684]: DEBUG oslo_concurrency.lockutils [req-ad11dc20-163d-4257-b97b-abea6ef529de req-7ef8fa37-e264-4041-aebb-01d99b433a4a service nova] Releasing lock "refresh_cache-548df581-073b-41d4-bcbe-df7342a2beca" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2018.027624] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053081, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.068359] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2018.068593] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2018.069036] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Deleting the datastore file [datastore1] 06751c34-0724-44ba-a263-ad27fcf2920f {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2018.069356] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e59821f1-58d1-4a29-afac-47666adbdce9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.077314] env[62684]: DEBUG oslo_vmware.api [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 2018.077314] env[62684]: value = "task-2053082" [ 2018.077314] env[62684]: _type = "Task" [ 2018.077314] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.085070] env[62684]: DEBUG oslo_vmware.api [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2053082, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.136529] env[62684]: DEBUG nova.compute.manager [req-e648f816-9980-4741-aba7-c5d32eb51c6e req-6ed0bf0d-6521-4829-84fa-12c66a612618 service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Received event network-vif-deleted-f3a330d0-ed4d-48e9-956c-1f6ee2137ea2 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2018.136909] env[62684]: INFO nova.compute.manager [req-e648f816-9980-4741-aba7-c5d32eb51c6e req-6ed0bf0d-6521-4829-84fa-12c66a612618 service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Neutron deleted interface f3a330d0-ed4d-48e9-956c-1f6ee2137ea2; detaching it from the instance and deleting it from the info cache [ 2018.137156] env[62684]: DEBUG nova.network.neutron [req-e648f816-9980-4741-aba7-c5d32eb51c6e req-6ed0bf0d-6521-4829-84fa-12c66a612618 service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2018.297477] env[62684]: DEBUG nova.network.neutron [-] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2018.358105] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527465ca-3251-e152-cc06-73a8e6037880, 'name': SearchDatastore_Task, 'duration_secs': 0.029429} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.358421] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2018.358698] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 7b29207a-7fa8-4374-819e-c046b2014969/7b29207a-7fa8-4374-819e-c046b2014969.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2018.358992] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e3cf0632-4ac4-465b-8d11-597d7dc16197 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.365606] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Waiting for the task: (returnval){ [ 2018.365606] env[62684]: value = "task-2053083" [ 2018.365606] env[62684]: _type = "Task" [ 2018.365606] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.374350] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2053083, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.402476] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Acquiring lock "380a804e-e1bf-4efa-8bb8-213733778927" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2018.402733] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Lock "380a804e-e1bf-4efa-8bb8-213733778927" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2018.429940] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.519s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2018.430577] env[62684]: DEBUG nova.compute.manager [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2018.433820] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.002s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2018.433899] env[62684]: DEBUG nova.objects.instance [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lazy-loading 'resources' on Instance uuid 6faeae10-c0bd-4297-b992-c05511fedb21 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2018.531884] env[62684]: DEBUG oslo_vmware.api [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053081, 'name': PowerOnVM_Task, 'duration_secs': 0.75177} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.531884] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2018.591410] env[62684]: DEBUG oslo_vmware.api [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2053082, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198162} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.591488] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2018.591737] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2018.592135] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2018.592404] env[62684]: INFO nova.compute.manager [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Took 1.65 seconds to destroy the instance on the hypervisor. [ 2018.592709] env[62684]: DEBUG oslo.service.loopingcall [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2018.592959] env[62684]: DEBUG nova.compute.manager [-] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2018.593898] env[62684]: DEBUG nova.network.neutron [-] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2018.640889] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d0e835e2-1dec-49eb-9422-18739c3c2388 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.653582] env[62684]: DEBUG nova.compute.manager [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2018.656734] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2d5e573-be3c-4c79-be5b-e7c321032650 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.672716] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14648589-100e-423c-b814-0a4823cdafe6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.705728] env[62684]: DEBUG nova.compute.manager [req-e648f816-9980-4741-aba7-c5d32eb51c6e req-6ed0bf0d-6521-4829-84fa-12c66a612618 service nova] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Detach interface failed, port_id=f3a330d0-ed4d-48e9-956c-1f6ee2137ea2, reason: Instance 548df581-073b-41d4-bcbe-df7342a2beca could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2018.802205] env[62684]: INFO nova.compute.manager [-] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Took 1.41 seconds to deallocate network for instance. [ 2018.878692] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2053083, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.906496] env[62684]: DEBUG nova.compute.manager [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2018.939733] env[62684]: DEBUG nova.compute.utils [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2018.945586] env[62684]: DEBUG nova.compute.manager [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2018.946030] env[62684]: DEBUG nova.network.neutron [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2019.085692] env[62684]: DEBUG nova.policy [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '04b9379883ea40959090a52ce58805a8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9e2e0ad7001b4b59805c1d6a3a0caf35', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2019.127223] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "02dc8c41-5092-4f84-9722-37d4df3a459a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2019.127540] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "02dc8c41-5092-4f84-9722-37d4df3a459a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2019.127785] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "02dc8c41-5092-4f84-9722-37d4df3a459a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2019.127977] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "02dc8c41-5092-4f84-9722-37d4df3a459a-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2019.128198] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "02dc8c41-5092-4f84-9722-37d4df3a459a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2019.130501] env[62684]: INFO nova.compute.manager [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Terminating instance [ 2019.132607] env[62684]: DEBUG nova.compute.manager [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2019.132815] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2019.134171] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8beacc52-441f-4f77-a8d2-5662f03fa3d0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.148123] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2019.148123] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a55a3ab3-87b9-444c-af62-c9024604bac9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.154236] env[62684]: DEBUG oslo_vmware.api [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 2019.154236] env[62684]: value = "task-2053084" [ 2019.154236] env[62684]: _type = "Task" [ 2019.154236] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2019.163848] env[62684]: DEBUG oslo_vmware.api [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2053084, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.195289] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0b824087-0ad4-4106-87a5-1576b86936df tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 57.713s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2019.307820] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2019.365908] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f81c3bbf-93fe-4783-85af-bb9446fac164 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.379630] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-966a5aab-39eb-4aa6-8b3f-bb653750e8dd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.383021] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2053083, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.681983} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2019.383305] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 7b29207a-7fa8-4374-819e-c046b2014969/7b29207a-7fa8-4374-819e-c046b2014969.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2019.383524] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2019.384134] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-45020851-6987-41f3-be12-ca742e0438d5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.421405] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e108c8-ad4a-426c-8d69-2e6711ff2735 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.424738] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Waiting for the task: (returnval){ [ 2019.424738] env[62684]: value = "task-2053085" [ 2019.424738] env[62684]: _type = "Task" [ 2019.424738] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2019.432697] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd401fe5-a3dc-4be4-89dd-b21772963a81 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.440911] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2053085, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.442780] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2019.453929] env[62684]: DEBUG nova.compute.manager [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2019.457850] env[62684]: DEBUG nova.compute.provider_tree [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2019.664157] env[62684]: DEBUG oslo_vmware.api [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2053084, 'name': PowerOffVM_Task, 'duration_secs': 0.323277} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2019.664462] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2019.664643] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2019.664949] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ddac828f-db01-4638-994d-c6a4578cc649 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.724471] env[62684]: DEBUG nova.network.neutron [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Successfully created port: 204e0bce-b0f4-4edd-b609-c528bf00f2fe {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2019.863166] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2019.863166] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2019.863166] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Deleting the datastore file [datastore2] 02dc8c41-5092-4f84-9722-37d4df3a459a {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2019.863866] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b38f4ef1-30b7-4126-94f6-d594a0f098fd {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.872016] env[62684]: DEBUG oslo_vmware.api [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 2019.872016] env[62684]: value = "task-2053087" [ 2019.872016] env[62684]: _type = "Task" [ 2019.872016] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2019.878498] env[62684]: DEBUG oslo_vmware.api [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2053087, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.934976] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2053085, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068603} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2019.935669] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2019.936612] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5d80ca6-7714-4bd1-b0f0-68f6f8a646c3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.959996] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] 7b29207a-7fa8-4374-819e-c046b2014969/7b29207a-7fa8-4374-819e-c046b2014969.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2019.964029] env[62684]: DEBUG nova.scheduler.client.report [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2019.967075] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be706ea1-be28-48ec-ba93-8e347cd175ea {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.982345] env[62684]: DEBUG oslo_concurrency.lockutils [None 
req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.549s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2019.984436] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.714s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2019.986786] env[62684]: INFO nova.compute.claims [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2019.995328] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Waiting for the task: (returnval){ [ 2019.995328] env[62684]: value = "task-2053088" [ 2019.995328] env[62684]: _type = "Task" [ 2019.995328] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.003411] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2053088, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.006833] env[62684]: INFO nova.scheduler.client.report [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Deleted allocations for instance 6faeae10-c0bd-4297-b992-c05511fedb21 [ 2020.047114] env[62684]: DEBUG nova.network.neutron [-] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2020.171947] env[62684]: DEBUG nova.compute.manager [req-05774aa8-5495-4910-882a-f04c90a125b8 req-9f9d49c2-dcba-4d47-9ecc-26f9e49fbe9f service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Received event network-changed-fafc2062-9754-4ce0-8647-362b6bb8f8d7 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2020.172339] env[62684]: DEBUG nova.compute.manager [req-05774aa8-5495-4910-882a-f04c90a125b8 req-9f9d49c2-dcba-4d47-9ecc-26f9e49fbe9f service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Refreshing instance network info cache due to event network-changed-fafc2062-9754-4ce0-8647-362b6bb8f8d7. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2020.172613] env[62684]: DEBUG oslo_concurrency.lockutils [req-05774aa8-5495-4910-882a-f04c90a125b8 req-9f9d49c2-dcba-4d47-9ecc-26f9e49fbe9f service nova] Acquiring lock "refresh_cache-81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2020.172765] env[62684]: DEBUG oslo_concurrency.lockutils [req-05774aa8-5495-4910-882a-f04c90a125b8 req-9f9d49c2-dcba-4d47-9ecc-26f9e49fbe9f service nova] Acquired lock "refresh_cache-81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2020.172931] env[62684]: DEBUG nova.network.neutron [req-05774aa8-5495-4910-882a-f04c90a125b8 req-9f9d49c2-dcba-4d47-9ecc-26f9e49fbe9f service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Refreshing network info cache for port fafc2062-9754-4ce0-8647-362b6bb8f8d7 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2020.379893] env[62684]: DEBUG oslo_vmware.api [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2053087, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.483024] env[62684]: DEBUG nova.compute.manager [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2020.506941] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2053088, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.513332] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f29f234e-ace6-40a7-8d46-e1720b4c2ff9 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "6faeae10-c0bd-4297-b992-c05511fedb21" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.020s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2020.517089] env[62684]: DEBUG nova.virt.hardware [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2020.517466] env[62684]: DEBUG nova.virt.hardware [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2020.517731] env[62684]: DEBUG nova.virt.hardware [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2020.518062] env[62684]: DEBUG nova.virt.hardware [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2020.518365] env[62684]: DEBUG nova.virt.hardware [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2020.518681] env[62684]: DEBUG nova.virt.hardware [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2020.519088] env[62684]: DEBUG nova.virt.hardware [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2020.519383] env[62684]: DEBUG nova.virt.hardware [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2020.519699] env[62684]: DEBUG nova.virt.hardware [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2020.519996] env[62684]: DEBUG nova.virt.hardware [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2020.520339] env[62684]: DEBUG nova.virt.hardware [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2020.521350] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6396e4c4-c935-421f-a88e-f0447d0171bd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.530763] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cabfc2a-f3b6-40c9-b1a6-de7726fb954a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.551192] env[62684]: INFO nova.compute.manager [-] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Took 1.96 seconds to deallocate network for instance. [ 2020.885358] env[62684]: DEBUG oslo_vmware.api [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2053087, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.632452} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2020.885628] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2020.885839] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2020.886788] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2020.887088] env[62684]: INFO nova.compute.manager [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Took 1.75 seconds to destroy the instance on the hypervisor. [ 2020.887393] env[62684]: DEBUG oslo.service.loopingcall [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2020.887638] env[62684]: DEBUG nova.compute.manager [-] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2020.887752] env[62684]: DEBUG nova.network.neutron [-] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2020.941917] env[62684]: DEBUG nova.network.neutron [req-05774aa8-5495-4910-882a-f04c90a125b8 req-9f9d49c2-dcba-4d47-9ecc-26f9e49fbe9f service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Updated VIF entry in instance network info cache for port fafc2062-9754-4ce0-8647-362b6bb8f8d7. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2020.941917] env[62684]: DEBUG nova.network.neutron [req-05774aa8-5495-4910-882a-f04c90a125b8 req-9f9d49c2-dcba-4d47-9ecc-26f9e49fbe9f service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Updating instance_info_cache with network_info: [{"id": "fafc2062-9754-4ce0-8647-362b6bb8f8d7", "address": "fa:16:3e:a1:7d:89", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfafc2062-97", "ovs_interfaceid": "fafc2062-9754-4ce0-8647-362b6bb8f8d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2021.012241] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2053088, 'name': ReconfigVM_Task, 'duration_secs': 0.873243} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.012241] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Reconfigured VM instance instance-00000043 to attach disk [datastore1] 7b29207a-7fa8-4374-819e-c046b2014969/7b29207a-7fa8-4374-819e-c046b2014969.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2021.012608] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c6cf6cea-1c0d-48ad-80ff-3dc652b02104 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.019385] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Waiting for the task: (returnval){ [ 2021.019385] env[62684]: value = "task-2053089" [ 2021.019385] env[62684]: _type = "Task" [ 2021.019385] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.034259] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2053089, 'name': Rename_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.057538] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2021.221687] env[62684]: DEBUG nova.compute.manager [req-00f1a537-eecc-4958-b054-9ad3885d87b0 req-ab5f8a49-33c9-4636-927e-562fa69d011b service nova] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Received event network-vif-deleted-39c750a6-1076-4354-bc30-d7f50ca821b5 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2021.221895] env[62684]: INFO nova.compute.manager [req-00f1a537-eecc-4958-b054-9ad3885d87b0 req-ab5f8a49-33c9-4636-927e-562fa69d011b service nova] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Neutron deleted interface 39c750a6-1076-4354-bc30-d7f50ca821b5; detaching it from the instance and deleting it from the info cache [ 2021.222904] env[62684]: DEBUG nova.network.neutron [req-00f1a537-eecc-4958-b054-9ad3885d87b0 req-ab5f8a49-33c9-4636-927e-562fa69d011b service nova] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2021.359100] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fe1ecfd-22d2-4ff7-97f8-f8f8a5ecc601 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.369082] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a5dd08-cbed-4d29-bd7e-51d00747aa23 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.372855] env[62684]: DEBUG nova.network.neutron [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Successfully updated port: 204e0bce-b0f4-4edd-b609-c528bf00f2fe {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2021.402882] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af9a4952-46fa-4598-ae61-aa793a8116e4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.411631] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e19366a-48d5-4665-a8c6-4be6436bc7f2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.416330] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 
tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "274d214a-4b92-4900-a66c-54baea2a68f8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2021.416716] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "274d214a-4b92-4900-a66c-54baea2a68f8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2021.417010] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "274d214a-4b92-4900-a66c-54baea2a68f8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2021.417222] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "274d214a-4b92-4900-a66c-54baea2a68f8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2021.417400] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "274d214a-4b92-4900-a66c-54baea2a68f8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2021.419442] env[62684]: INFO nova.compute.manager [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Terminating instance [ 2021.431937] env[62684]: DEBUG nova.compute.provider_tree [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2021.433652] env[62684]: DEBUG nova.compute.manager [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2021.433857] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2021.434658] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3737be6-ee8e-4c1f-84c7-a1fad9a0e4e2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.442191] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2021.442438] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7c0950e1-591e-415e-9b90-3c17f788d355 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.444274] env[62684]: DEBUG oslo_concurrency.lockutils [req-05774aa8-5495-4910-882a-f04c90a125b8 req-9f9d49c2-dcba-4d47-9ecc-26f9e49fbe9f service nova] Releasing lock "refresh_cache-81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2021.444548] env[62684]: DEBUG nova.compute.manager [req-05774aa8-5495-4910-882a-f04c90a125b8 req-9f9d49c2-dcba-4d47-9ecc-26f9e49fbe9f service nova] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Received event network-vif-deleted-b5cb1869-ace8-44cb-bd59-60e4ce4e95ad {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2021.449567] env[62684]: DEBUG oslo_vmware.api [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 2021.449567] env[62684]: value = "task-2053090" [ 2021.449567] env[62684]: _type = "Task" [ 2021.449567] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.459291] env[62684]: DEBUG oslo_vmware.api [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053090, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.531309] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2053089, 'name': Rename_Task, 'duration_secs': 0.155743} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.531718] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2021.532100] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-597c38c0-b181-41bd-bf0b-d8fdf903bb4a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.539415] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Waiting for the task: (returnval){ [ 2021.539415] env[62684]: value = "task-2053091" [ 2021.539415] env[62684]: _type = "Task" [ 2021.539415] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.549336] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2053091, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.694121] env[62684]: DEBUG nova.network.neutron [-] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2021.728295] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5a692cf4-9b16-41da-83b4-4bedda5c5968 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.738212] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e3e497e-5afa-4ec6-9156-c3ca38499e4b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.775092] env[62684]: DEBUG nova.compute.manager [req-00f1a537-eecc-4958-b054-9ad3885d87b0 req-ab5f8a49-33c9-4636-927e-562fa69d011b service nova] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Detach interface failed, port_id=39c750a6-1076-4354-bc30-d7f50ca821b5, reason: Instance 02dc8c41-5092-4f84-9722-37d4df3a459a could not be found. 
{{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2021.876104] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Acquiring lock "refresh_cache-fcc937e3-163d-432b-a131-a53c002e5e8d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2021.876104] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Acquired lock "refresh_cache-fcc937e3-163d-432b-a131-a53c002e5e8d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2021.876196] env[62684]: DEBUG nova.network.neutron [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2021.939396] env[62684]: DEBUG nova.scheduler.client.report [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2021.960460] env[62684]: DEBUG oslo_vmware.api [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053090, 'name': PowerOffVM_Task, 'duration_secs': 0.225046} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.961336] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2021.961515] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2021.961775] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9b4615b9-a5ba-4957-8b33-86775ec77677 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.005350] env[62684]: DEBUG oslo_concurrency.lockutils [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "41da0c18-dd9c-49bb-8b0d-a907575ee22e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2022.005350] env[62684]: DEBUG oslo_concurrency.lockutils [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "41da0c18-dd9c-49bb-8b0d-a907575ee22e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2022.050574] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2053091, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.079990] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2022.080260] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2022.080451] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Deleting the datastore file [datastore1] 274d214a-4b92-4900-a66c-54baea2a68f8 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2022.080730] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-882257b9-b416-4abb-a169-7980f006499b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.087697] env[62684]: DEBUG oslo_vmware.api [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 2022.087697] env[62684]: value = "task-2053093" [ 2022.087697] env[62684]: _type = "Task" [ 2022.087697] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.097107] env[62684]: DEBUG oslo_vmware.api [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053093, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.196685] env[62684]: INFO nova.compute.manager [-] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Took 1.31 seconds to deallocate network for instance. 
[ 2022.203257] env[62684]: DEBUG nova.compute.manager [req-1499a8f1-d0c5-42b7-90d3-4e2e6d22a909 req-d85f32d7-0997-4da3-a15b-36878bc35557 service nova] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Received event network-vif-plugged-204e0bce-b0f4-4edd-b609-c528bf00f2fe {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2022.203480] env[62684]: DEBUG oslo_concurrency.lockutils [req-1499a8f1-d0c5-42b7-90d3-4e2e6d22a909 req-d85f32d7-0997-4da3-a15b-36878bc35557 service nova] Acquiring lock "fcc937e3-163d-432b-a131-a53c002e5e8d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2022.203693] env[62684]: DEBUG oslo_concurrency.lockutils [req-1499a8f1-d0c5-42b7-90d3-4e2e6d22a909 req-d85f32d7-0997-4da3-a15b-36878bc35557 service nova] Lock "fcc937e3-163d-432b-a131-a53c002e5e8d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2022.203906] env[62684]: DEBUG oslo_concurrency.lockutils [req-1499a8f1-d0c5-42b7-90d3-4e2e6d22a909 req-d85f32d7-0997-4da3-a15b-36878bc35557 service nova] Lock "fcc937e3-163d-432b-a131-a53c002e5e8d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2022.204125] env[62684]: DEBUG nova.compute.manager [req-1499a8f1-d0c5-42b7-90d3-4e2e6d22a909 req-d85f32d7-0997-4da3-a15b-36878bc35557 service nova] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] No waiting events found dispatching network-vif-plugged-204e0bce-b0f4-4edd-b609-c528bf00f2fe {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2022.204303] env[62684]: WARNING nova.compute.manager [req-1499a8f1-d0c5-42b7-90d3-4e2e6d22a909 req-d85f32d7-0997-4da3-a15b-36878bc35557 service nova] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Received unexpected event network-vif-plugged-204e0bce-b0f4-4edd-b609-c528bf00f2fe for instance with vm_state building and task_state spawning. [ 2022.204467] env[62684]: DEBUG nova.compute.manager [req-1499a8f1-d0c5-42b7-90d3-4e2e6d22a909 req-d85f32d7-0997-4da3-a15b-36878bc35557 service nova] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Received event network-changed-204e0bce-b0f4-4edd-b609-c528bf00f2fe {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2022.204628] env[62684]: DEBUG nova.compute.manager [req-1499a8f1-d0c5-42b7-90d3-4e2e6d22a909 req-d85f32d7-0997-4da3-a15b-36878bc35557 service nova] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Refreshing instance network info cache due to event network-changed-204e0bce-b0f4-4edd-b609-c528bf00f2fe. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2022.204804] env[62684]: DEBUG oslo_concurrency.lockutils [req-1499a8f1-d0c5-42b7-90d3-4e2e6d22a909 req-d85f32d7-0997-4da3-a15b-36878bc35557 service nova] Acquiring lock "refresh_cache-fcc937e3-163d-432b-a131-a53c002e5e8d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2022.412416] env[62684]: DEBUG nova.network.neutron [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2022.444217] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.460s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2022.444886] env[62684]: DEBUG nova.compute.manager [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2022.447806] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 15.005s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2022.448026] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2022.448207] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2022.449085] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.141s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2022.449191] env[62684]: DEBUG nova.objects.instance [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Lazy-loading 'resources' on Instance uuid 548df581-073b-41d4-bcbe-df7342a2beca {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2022.450949] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-80edca57-a8aa-4c03-9314-5a926b3b229c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.461045] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbcab9db-74ba-45b7-9997-79d1ab8ca726 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.476800] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1b65397-fef4-4760-b36d-d0ccbd7b8a77 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.484555] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b7d9b8-f51f-4ecb-8010-2c22a85452f7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.514351] env[62684]: DEBUG nova.compute.manager [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2022.517597] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179099MB free_disk=154GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2022.517749] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2022.550444] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2053091, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.599173] env[62684]: DEBUG oslo_vmware.api [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053093, 'name': DeleteDatastoreFile_Task} progress is 100%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.600136] env[62684]: DEBUG nova.network.neutron [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Updating instance_info_cache with network_info: [{"id": "204e0bce-b0f4-4edd-b609-c528bf00f2fe", "address": "fa:16:3e:14:96:90", "network": {"id": "532579c0-d485-4585-bc63-1bbd3af0367a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1836758902-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e2e0ad7001b4b59805c1d6a3a0caf35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap204e0bce-b0", "ovs_interfaceid": "204e0bce-b0f4-4edd-b609-c528bf00f2fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2022.706581] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2022.952360] env[62684]: DEBUG nova.compute.utils [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2022.954928] env[62684]: DEBUG nova.compute.manager [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2022.955118] env[62684]: DEBUG nova.network.neutron [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2022.996064] env[62684]: DEBUG nova.policy [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6e8b54745b53458eafe4d911d7d6d7d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c54f74085f343d2b790145b0d82a9f8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2023.032216] env[62684]: DEBUG oslo_concurrency.lockutils [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2023.051138] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2053091, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.100065] env[62684]: DEBUG oslo_vmware.api [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053093, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.512415} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.100334] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2023.100523] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2023.100706] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2023.100887] env[62684]: INFO nova.compute.manager [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Took 1.67 seconds to destroy the instance on the hypervisor. [ 2023.101484] env[62684]: DEBUG oslo.service.loopingcall [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2023.101484] env[62684]: DEBUG nova.compute.manager [-] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2023.101484] env[62684]: DEBUG nova.network.neutron [-] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2023.103104] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Releasing lock "refresh_cache-fcc937e3-163d-432b-a131-a53c002e5e8d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2023.103393] env[62684]: DEBUG nova.compute.manager [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Instance network_info: |[{"id": "204e0bce-b0f4-4edd-b609-c528bf00f2fe", "address": "fa:16:3e:14:96:90", "network": {"id": "532579c0-d485-4585-bc63-1bbd3af0367a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1836758902-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e2e0ad7001b4b59805c1d6a3a0caf35", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap204e0bce-b0", "ovs_interfaceid": "204e0bce-b0f4-4edd-b609-c528bf00f2fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2023.103661] env[62684]: DEBUG oslo_concurrency.lockutils [req-1499a8f1-d0c5-42b7-90d3-4e2e6d22a909 req-d85f32d7-0997-4da3-a15b-36878bc35557 service nova] Acquired lock "refresh_cache-fcc937e3-163d-432b-a131-a53c002e5e8d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2023.104214] env[62684]: DEBUG nova.network.neutron [req-1499a8f1-d0c5-42b7-90d3-4e2e6d22a909 req-d85f32d7-0997-4da3-a15b-36878bc35557 service nova] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Refreshing network info cache for port 204e0bce-b0f4-4edd-b609-c528bf00f2fe {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2023.105048] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:96:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6f41e4aa-0d23-48c4-a359-574abb2e7b9a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '204e0bce-b0f4-4edd-b609-c528bf00f2fe', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2023.112771] env[62684]: DEBUG oslo.service.loopingcall [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2023.116414] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2023.117633] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e6aeb497-0c9f-4361-8090-636482000ca1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.141526] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2023.141526] env[62684]: value = "task-2053094" [ 2023.141526] env[62684]: _type = "Task" [ 2023.141526] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2023.151679] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053094, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.285623] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36119e37-d541-445b-9499-e2d24ab9fd25 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.289121] env[62684]: DEBUG nova.network.neutron [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Successfully created port: b509e8a7-cc45-43eb-abb5-1042ccb0b992 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2023.296593] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d80cece-97ac-4e6a-9597-4ed2638a01e0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.334543] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76cbcde7-7289-4534-9f9f-5a00a6757f24 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.342556] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f892188-d6fd-4193-abe2-5f0d205e7ac4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.357441] env[62684]: DEBUG nova.compute.provider_tree [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2023.401230] env[62684]: DEBUG nova.compute.manager [req-d5f963de-0383-4c3d-a28c-8da2553c5959 req-8baeb52f-0135-4671-b69b-f59f297c9eca service nova] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Received event network-vif-deleted-9e7719ad-6572-41df-9951-9dc91c818b24 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2023.401412] env[62684]: INFO nova.compute.manager [req-d5f963de-0383-4c3d-a28c-8da2553c5959 req-8baeb52f-0135-4671-b69b-f59f297c9eca service nova] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Neutron deleted interface 9e7719ad-6572-41df-9951-9dc91c818b24; detaching it from the instance and deleting it from the info cache [ 2023.401567] env[62684]: DEBUG nova.network.neutron [req-d5f963de-0383-4c3d-a28c-8da2553c5959 req-8baeb52f-0135-4671-b69b-f59f297c9eca service nova] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2023.456132] env[62684]: DEBUG nova.compute.manager [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2023.551459] env[62684]: DEBUG oslo_vmware.api [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2053091, 'name': PowerOnVM_Task, 'duration_secs': 1.709348} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.551752] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2023.551965] env[62684]: INFO nova.compute.manager [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Took 9.59 seconds to spawn the instance on the hypervisor. [ 2023.552200] env[62684]: DEBUG nova.compute.manager [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2023.553043] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0073506-ca9f-45e0-bb49-5a9c4503f347 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.651901] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053094, 'name': CreateVM_Task, 'duration_secs': 0.495799} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.652079] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2023.652727] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2023.652898] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2023.653238] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2023.653492] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5d46f97-091f-484c-9f94-2a1ac07291b2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.657731] env[62684]: DEBUG oslo_vmware.api [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Waiting for the task: (returnval){ [ 2023.657731] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5203f1a7-219f-8fdb-73cf-9748c66180e4" [ 2023.657731] env[62684]: _type = "Task" [ 2023.657731] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2023.665674] env[62684]: DEBUG oslo_vmware.api [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5203f1a7-219f-8fdb-73cf-9748c66180e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.820453] env[62684]: DEBUG nova.network.neutron [req-1499a8f1-d0c5-42b7-90d3-4e2e6d22a909 req-d85f32d7-0997-4da3-a15b-36878bc35557 service nova] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Updated VIF entry in instance network info cache for port 204e0bce-b0f4-4edd-b609-c528bf00f2fe. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2023.820823] env[62684]: DEBUG nova.network.neutron [req-1499a8f1-d0c5-42b7-90d3-4e2e6d22a909 req-d85f32d7-0997-4da3-a15b-36878bc35557 service nova] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Updating instance_info_cache with network_info: [{"id": "204e0bce-b0f4-4edd-b609-c528bf00f2fe", "address": "fa:16:3e:14:96:90", "network": {"id": "532579c0-d485-4585-bc63-1bbd3af0367a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1836758902-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e2e0ad7001b4b59805c1d6a3a0caf35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap204e0bce-b0", "ovs_interfaceid": "204e0bce-b0f4-4edd-b609-c528bf00f2fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2023.860336] env[62684]: DEBUG nova.scheduler.client.report [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2023.876618] env[62684]: DEBUG nova.network.neutron [-] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2023.904388] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-47dcc800-6f3a-496d-a645-e406abfaba00 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.913652] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea8297b-0581-4a22-882c-460d2e83d817 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.950907] env[62684]: DEBUG nova.compute.manager [req-d5f963de-0383-4c3d-a28c-8da2553c5959 req-8baeb52f-0135-4671-b69b-f59f297c9eca service nova] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Detach interface failed, port_id=9e7719ad-6572-41df-9951-9dc91c818b24, reason: Instance 274d214a-4b92-4900-a66c-54baea2a68f8 could not be found. 
{{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2024.070372] env[62684]: INFO nova.compute.manager [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Took 40.70 seconds to build instance. [ 2024.168586] env[62684]: DEBUG oslo_vmware.api [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5203f1a7-219f-8fdb-73cf-9748c66180e4, 'name': SearchDatastore_Task, 'duration_secs': 0.008616} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2024.168946] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2024.169158] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2024.169421] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2024.169575] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2024.169759] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2024.170050] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-24f5a0d1-da5e-4bf5-bf83-02e155b213d8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.178601] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2024.178789] env[62684]: 
DEBUG nova.virt.vmwareapi.vmops [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2024.179622] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f0a726e-57cd-4d2e-92a9-9adc1c500ff1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.185078] env[62684]: DEBUG oslo_vmware.api [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Waiting for the task: (returnval){ [ 2024.185078] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522ea3f5-6bd5-1ce4-9f02-bfff94dd6ded" [ 2024.185078] env[62684]: _type = "Task" [ 2024.185078] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2024.193573] env[62684]: DEBUG oslo_vmware.api [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522ea3f5-6bd5-1ce4-9f02-bfff94dd6ded, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.326374] env[62684]: DEBUG oslo_concurrency.lockutils [req-1499a8f1-d0c5-42b7-90d3-4e2e6d22a909 req-d85f32d7-0997-4da3-a15b-36878bc35557 service nova] Releasing lock "refresh_cache-fcc937e3-163d-432b-a131-a53c002e5e8d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2024.365307] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.916s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2024.368175] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.925s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2024.370097] env[62684]: INFO nova.compute.claims [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2024.378977] env[62684]: INFO nova.compute.manager [-] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Took 1.28 seconds to deallocate network for instance. 
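The recurring "Invoking &lt;object&gt;.&lt;method&gt; with opID=…" / "Waiting for the task … to complete" / "Task … progress is N%" / "completed successfully" sequence is oslo.vmware's task-invocation-and-polling pattern (PowerOnVM_Task, DeleteDatastoreFile_Task and CreateVM_Task above all follow it). A minimal sketch of that pattern under assumed credentials and a placeholder VM reference; not Nova's vm_util code:

```python
# Minimal sketch of the oslo.vmware task pattern that produces the
# "Invoking <obj>.<method> with opID=..." / "Waiting for the task ..." /
# "progress is N%" / "completed successfully" lines in this trace.
# Host, credentials and vm_ref are placeholders.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vc.example.test', 'user', 'secret',         # placeholder vCenter/creds
    api_retry_count=10, task_poll_interval=0.5)  # poll interval drives the
                                                 # "progress is N%" lines

vm_ref = None  # placeholder: a VirtualMachine moref looked up elsewhere

# invoke_api() issues the SOAP call (logged as "Invoking <obj>.<method>")
# and returns a Task moref; wait_for_task() polls the TaskInfo until it
# reaches 'success', raising if the task ends in 'error'.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task)
```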
[ 2024.394990] env[62684]: DEBUG nova.compute.manager [req-aa88896b-2c04-4cbf-b9bc-52ca8abb3885 req-74d88368-5e10-45f4-887d-6ceeccce4f7f service nova] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Received event network-changed-c77f6606-e602-4667-992c-c6a0e052e01a {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2024.395335] env[62684]: DEBUG nova.compute.manager [req-aa88896b-2c04-4cbf-b9bc-52ca8abb3885 req-74d88368-5e10-45f4-887d-6ceeccce4f7f service nova] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Refreshing instance network info cache due to event network-changed-c77f6606-e602-4667-992c-c6a0e052e01a. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2024.395472] env[62684]: DEBUG oslo_concurrency.lockutils [req-aa88896b-2c04-4cbf-b9bc-52ca8abb3885 req-74d88368-5e10-45f4-887d-6ceeccce4f7f service nova] Acquiring lock "refresh_cache-7b29207a-7fa8-4374-819e-c046b2014969" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2024.395625] env[62684]: DEBUG oslo_concurrency.lockutils [req-aa88896b-2c04-4cbf-b9bc-52ca8abb3885 req-74d88368-5e10-45f4-887d-6ceeccce4f7f service nova] Acquired lock "refresh_cache-7b29207a-7fa8-4374-819e-c046b2014969" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2024.395806] env[62684]: DEBUG nova.network.neutron [req-aa88896b-2c04-4cbf-b9bc-52ca8abb3885 req-74d88368-5e10-45f4-887d-6ceeccce4f7f service nova] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Refreshing network info cache for port c77f6606-e602-4667-992c-c6a0e052e01a {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2024.400769] env[62684]: INFO nova.scheduler.client.report [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Deleted allocations for instance 548df581-073b-41d4-bcbe-df7342a2beca [ 2024.464841] env[62684]: DEBUG nova.compute.manager [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2024.492230] env[62684]: DEBUG nova.virt.hardware [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2024.492627] env[62684]: DEBUG nova.virt.hardware [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2024.492903] env[62684]: DEBUG nova.virt.hardware [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2024.493247] env[62684]: DEBUG nova.virt.hardware [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2024.493513] env[62684]: DEBUG nova.virt.hardware [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2024.493777] env[62684]: DEBUG nova.virt.hardware [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2024.494184] env[62684]: DEBUG nova.virt.hardware [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2024.494406] env[62684]: DEBUG nova.virt.hardware [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2024.494729] env[62684]: DEBUG nova.virt.hardware [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 
tempest-ServersTestJSON-828328252-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2024.494816] env[62684]: DEBUG nova.virt.hardware [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2024.494962] env[62684]: DEBUG nova.virt.hardware [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2024.496542] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-311dd486-6696-45e1-9ac6-5fd4f23a7bd0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.505140] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f1cf9e8-d6dc-4a27-90e3-6adda1770a64 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.572455] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71e2f43c-b236-4bd0-ac6a-457b4ace39f9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Lock "7b29207a-7fa8-4374-819e-c046b2014969" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.205s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2024.702603] env[62684]: DEBUG oslo_vmware.api [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522ea3f5-6bd5-1ce4-9f02-bfff94dd6ded, 'name': SearchDatastore_Task, 'duration_secs': 0.012413} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2024.703712] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5486b4dc-6957-4b62-b072-14141a678a20 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.712016] env[62684]: DEBUG oslo_vmware.api [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Waiting for the task: (returnval){ [ 2024.712016] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526096c1-ec2b-0bb0-e392-961ee615ffc5" [ 2024.712016] env[62684]: _type = "Task" [ 2024.712016] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2024.718825] env[62684]: DEBUG oslo_vmware.api [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526096c1-ec2b-0bb0-e392-961ee615ffc5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.885825] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2024.908057] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e9c901fc-770a-4414-9eea-a44b895a99b0 tempest-ServerRescueTestJSONUnderV235-1085839737 tempest-ServerRescueTestJSONUnderV235-1085839737-project-member] Lock "548df581-073b-41d4-bcbe-df7342a2beca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.798s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2024.940959] env[62684]: DEBUG nova.network.neutron [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Successfully updated port: b509e8a7-cc45-43eb-abb5-1042ccb0b992 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2024.991795] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f29f8725-8e88-4b13-8058-9b8668709f01 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Acquiring lock "7b29207a-7fa8-4374-819e-c046b2014969" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2024.992074] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f29f8725-8e88-4b13-8058-9b8668709f01 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Lock "7b29207a-7fa8-4374-819e-c046b2014969" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2024.992317] env[62684]: INFO nova.compute.manager [None req-f29f8725-8e88-4b13-8058-9b8668709f01 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Rebooting instance [ 2025.182547] env[62684]: DEBUG nova.network.neutron [req-aa88896b-2c04-4cbf-b9bc-52ca8abb3885 req-74d88368-5e10-45f4-887d-6ceeccce4f7f service nova] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Updated VIF entry in instance network info cache for port c77f6606-e602-4667-992c-c6a0e052e01a. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2025.182942] env[62684]: DEBUG nova.network.neutron [req-aa88896b-2c04-4cbf-b9bc-52ca8abb3885 req-74d88368-5e10-45f4-887d-6ceeccce4f7f service nova] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Updating instance_info_cache with network_info: [{"id": "c77f6606-e602-4667-992c-c6a0e052e01a", "address": "fa:16:3e:43:c7:f0", "network": {"id": "899020b7-a29e-4a35-bf3c-f9aebda1208d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1035902693-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "feab568b5c9e41bfa2ca824d44bcc4e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06eaa4c9-dbc2-4d38-a844-7bf76e7b5a64", "external-id": "nsx-vlan-transportzone-804", "segmentation_id": 804, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc77f6606-e6", "ovs_interfaceid": "c77f6606-e602-4667-992c-c6a0e052e01a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2025.220473] env[62684]: DEBUG oslo_vmware.api [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526096c1-ec2b-0bb0-e392-961ee615ffc5, 'name': SearchDatastore_Task, 'duration_secs': 0.009739} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2025.220756] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2025.221292] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] fcc937e3-163d-432b-a131-a53c002e5e8d/fcc937e3-163d-432b-a131-a53c002e5e8d.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2025.221292] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea035c4b-f58e-4d65-aca6-d4f5d27ea380 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.229922] env[62684]: DEBUG oslo_vmware.api [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Waiting for the task: (returnval){ [ 2025.229922] env[62684]: value = "task-2053095" [ 2025.229922] env[62684]: _type = "Task" [ 2025.229922] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2025.238173] env[62684]: DEBUG oslo_vmware.api [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2053095, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.444249] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "refresh_cache-a1b7c2a7-f21d-41f4-9102-e656b8205e1f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2025.444384] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquired lock "refresh_cache-a1b7c2a7-f21d-41f4-9102-e656b8205e1f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2025.444900] env[62684]: DEBUG nova.network.neutron [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2025.512764] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f29f8725-8e88-4b13-8058-9b8668709f01 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Acquiring lock "refresh_cache-7b29207a-7fa8-4374-819e-c046b2014969" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2025.663573] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9699d33e-29d9-4182-8921-4a745b8d4e8d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.671871] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-173c9445-7f5c-4186-953a-6588b6f4c72a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.709470] env[62684]: DEBUG oslo_concurrency.lockutils [req-aa88896b-2c04-4cbf-b9bc-52ca8abb3885 req-74d88368-5e10-45f4-887d-6ceeccce4f7f service nova] Releasing lock "refresh_cache-7b29207a-7fa8-4374-819e-c046b2014969" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2025.709470] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f29f8725-8e88-4b13-8058-9b8668709f01 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Acquired lock "refresh_cache-7b29207a-7fa8-4374-819e-c046b2014969" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2025.709470] env[62684]: DEBUG nova.network.neutron [None req-f29f8725-8e88-4b13-8058-9b8668709f01 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2025.709707] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fa18559-7629-4843-89a7-2afc15291645 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.722851] env[62684]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df5484c-ab79-4b35-9ecb-186334fead2a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.742407] env[62684]: DEBUG nova.compute.provider_tree [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2025.757980] env[62684]: DEBUG oslo_vmware.api [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2053095, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.008286] env[62684]: DEBUG nova.network.neutron [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2026.191154] env[62684]: DEBUG nova.network.neutron [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Updating instance_info_cache with network_info: [{"id": "b509e8a7-cc45-43eb-abb5-1042ccb0b992", "address": "fa:16:3e:e8:03:40", "network": {"id": "aa52badb-0b73-48bc-afaa-5e06a97d5c7d", "bridge": "br-int", "label": "tempest-ServersTestJSON-556342067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c54f74085f343d2b790145b0d82a9f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb509e8a7-cc", "ovs_interfaceid": "b509e8a7-cc45-43eb-abb5-1042ccb0b992", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2026.250809] env[62684]: DEBUG nova.scheduler.client.report [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2026.261098] env[62684]: DEBUG oslo_vmware.api [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2053095, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.972049} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2026.262533] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] fcc937e3-163d-432b-a131-a53c002e5e8d/fcc937e3-163d-432b-a131-a53c002e5e8d.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2026.263384] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2026.263694] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-18d87220-39c4-4f46-b961-832ec2ab3541 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.271226] env[62684]: DEBUG oslo_vmware.api [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Waiting for the task: (returnval){ [ 2026.271226] env[62684]: value = "task-2053096" [ 2026.271226] env[62684]: _type = "Task" [ 2026.271226] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2026.280550] env[62684]: DEBUG oslo_vmware.api [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2053096, 'name': ExtendVirtualDisk_Task} progress is 0%. 
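The 1048576 passed to ExtendVirtualDisk_Task above is the flavor's root disk expressed in KiB: the m1.nano flavor used later in this log has root_gb=1, and 1 GiB is 1024 * 1024 = 1,048,576 KiB. A tiny sketch of that conversion and the grow-only check; the helper names are illustrative, not Nova's code.

# root_gb -> KiB value handed to ExtendVirtualDisk_Task (1 GiB == 1048576 KiB).
KIB_PER_GIB = 1024 * 1024

def root_disk_size_kib(root_gb):
    return root_gb * KIB_PER_GIB

def needs_extend(current_size_kib, root_gb):
    # Only ever grow the copied image; never shrink it.
    return root_disk_size_kib(root_gb) > current_size_kib

assert root_disk_size_kib(1) == 1048576      # the value in the log line above
print(needs_extend(21318656 // 1024, 1))     # cirros image size from this log, in KiB -> True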
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.428262] env[62684]: DEBUG nova.compute.manager [req-9f87b6a4-f918-42f4-9777-cf12d2382b60 req-a3c1755d-4df2-4874-a4ee-1bb7f295a519 service nova] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Received event network-vif-plugged-b509e8a7-cc45-43eb-abb5-1042ccb0b992 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2026.428262] env[62684]: DEBUG oslo_concurrency.lockutils [req-9f87b6a4-f918-42f4-9777-cf12d2382b60 req-a3c1755d-4df2-4874-a4ee-1bb7f295a519 service nova] Acquiring lock "a1b7c2a7-f21d-41f4-9102-e656b8205e1f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2026.428339] env[62684]: DEBUG oslo_concurrency.lockutils [req-9f87b6a4-f918-42f4-9777-cf12d2382b60 req-a3c1755d-4df2-4874-a4ee-1bb7f295a519 service nova] Lock "a1b7c2a7-f21d-41f4-9102-e656b8205e1f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2026.428489] env[62684]: DEBUG oslo_concurrency.lockutils [req-9f87b6a4-f918-42f4-9777-cf12d2382b60 req-a3c1755d-4df2-4874-a4ee-1bb7f295a519 service nova] Lock "a1b7c2a7-f21d-41f4-9102-e656b8205e1f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2026.428680] env[62684]: DEBUG nova.compute.manager [req-9f87b6a4-f918-42f4-9777-cf12d2382b60 req-a3c1755d-4df2-4874-a4ee-1bb7f295a519 service nova] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] No waiting events found dispatching network-vif-plugged-b509e8a7-cc45-43eb-abb5-1042ccb0b992 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2026.428864] env[62684]: WARNING nova.compute.manager [req-9f87b6a4-f918-42f4-9777-cf12d2382b60 req-a3c1755d-4df2-4874-a4ee-1bb7f295a519 service nova] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Received unexpected event network-vif-plugged-b509e8a7-cc45-43eb-abb5-1042ccb0b992 for instance with vm_state building and task_state spawning. [ 2026.429049] env[62684]: DEBUG nova.compute.manager [req-9f87b6a4-f918-42f4-9777-cf12d2382b60 req-a3c1755d-4df2-4874-a4ee-1bb7f295a519 service nova] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Received event network-changed-b509e8a7-cc45-43eb-abb5-1042ccb0b992 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2026.429281] env[62684]: DEBUG nova.compute.manager [req-9f87b6a4-f918-42f4-9777-cf12d2382b60 req-a3c1755d-4df2-4874-a4ee-1bb7f295a519 service nova] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Refreshing instance network info cache due to event network-changed-b509e8a7-cc45-43eb-abb5-1042ccb0b992. 
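The event lines above show the other half of VIF plugging: Neutron notifies Nova that port b509e8a7-cc45 is plugged, the compute manager looks for a registered waiter for that (instance, event) pair, finds none ("No waiting events found"), and so logs the event as unexpected while the instance is still building. Below is a deliberately simplified registry illustrating that pop-or-warn behaviour; real Nova keys events per instance under the "<uuid>-events" lock seen above, so this toy class is only a sketch.

# Toy version of the pop_instance_event behaviour in the log: a waiter is
# registered before the event is expected; popping with no waiter present
# is the "Received unexpected event" case.
import threading

class InstanceEvents:
    def __init__(self):
        self._waiters = {}                    # (instance_uuid, event) -> Event
        self._lock = threading.Lock()

    def prepare(self, instance_uuid, event_name):
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def pop_instance_event(self, instance_uuid, event_name):
        with self._lock:
            return self._waiters.pop((instance_uuid, event_name), None)

events = InstanceEvents()
waiter = events.pop_instance_event(
    'a1b7c2a7-f21d-41f4-9102-e656b8205e1f',
    'network-vif-plugged-b509e8a7-cc45-43eb-abb5-1042ccb0b992')
if waiter is None:
    print('Received unexpected event: no waiter registered')   # the WARNING above
else:
    waiter.set()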
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2026.429872] env[62684]: DEBUG oslo_concurrency.lockutils [req-9f87b6a4-f918-42f4-9777-cf12d2382b60 req-a3c1755d-4df2-4874-a4ee-1bb7f295a519 service nova] Acquiring lock "refresh_cache-a1b7c2a7-f21d-41f4-9102-e656b8205e1f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2026.441389] env[62684]: DEBUG nova.network.neutron [None req-f29f8725-8e88-4b13-8058-9b8668709f01 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Updating instance_info_cache with network_info: [{"id": "c77f6606-e602-4667-992c-c6a0e052e01a", "address": "fa:16:3e:43:c7:f0", "network": {"id": "899020b7-a29e-4a35-bf3c-f9aebda1208d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1035902693-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "feab568b5c9e41bfa2ca824d44bcc4e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06eaa4c9-dbc2-4d38-a844-7bf76e7b5a64", "external-id": "nsx-vlan-transportzone-804", "segmentation_id": 804, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc77f6606-e6", "ovs_interfaceid": "c77f6606-e602-4667-992c-c6a0e052e01a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2026.693518] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Releasing lock "refresh_cache-a1b7c2a7-f21d-41f4-9102-e656b8205e1f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2026.693815] env[62684]: DEBUG nova.compute.manager [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Instance network_info: |[{"id": "b509e8a7-cc45-43eb-abb5-1042ccb0b992", "address": "fa:16:3e:e8:03:40", "network": {"id": "aa52badb-0b73-48bc-afaa-5e06a97d5c7d", "bridge": "br-int", "label": "tempest-ServersTestJSON-556342067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c54f74085f343d2b790145b0d82a9f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tapb509e8a7-cc", "ovs_interfaceid": "b509e8a7-cc45-43eb-abb5-1042ccb0b992", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2026.694242] env[62684]: DEBUG oslo_concurrency.lockutils [req-9f87b6a4-f918-42f4-9777-cf12d2382b60 req-a3c1755d-4df2-4874-a4ee-1bb7f295a519 service nova] Acquired lock "refresh_cache-a1b7c2a7-f21d-41f4-9102-e656b8205e1f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2026.694497] env[62684]: DEBUG nova.network.neutron [req-9f87b6a4-f918-42f4-9777-cf12d2382b60 req-a3c1755d-4df2-4874-a4ee-1bb7f295a519 service nova] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Refreshing network info cache for port b509e8a7-cc45-43eb-abb5-1042ccb0b992 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2026.696179] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e8:03:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1fb81f98-6f5a-47ab-a512-27277591d064', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b509e8a7-cc45-43eb-abb5-1042ccb0b992', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2026.705511] env[62684]: DEBUG oslo.service.loopingcall [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2026.709731] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2026.710523] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-01b9130a-2ba6-484f-8d4a-acfe10435bf0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.735439] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2026.735439] env[62684]: value = "task-2053097" [ 2026.735439] env[62684]: _type = "Task" [ 2026.735439] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2026.746528] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053097, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.762128] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.394s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2026.762650] env[62684]: DEBUG nova.compute.manager [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2026.765353] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.708s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2026.765612] env[62684]: DEBUG nova.objects.instance [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lazy-loading 'resources' on Instance uuid 06751c34-0724-44ba-a263-ad27fcf2920f {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2026.785029] env[62684]: DEBUG oslo_vmware.api [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2053096, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071095} completed successfully. 
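The "compute_resources" lock messages above (waited 5.708s to acquire, held 2.394s) come from oslo.concurrency's lockutils reporting on a process-local lock that serializes resource-tracker updates. A minimal usage sketch follows; the bodies of the critical sections are placeholders.

# Minimal oslo.concurrency usage matching the "compute_resources" lock lines.
from oslo_concurrency import lockutils

# Context-manager form: serialize updates within this process.
with lockutils.lock('compute_resources'):
    pass   # e.g. claim resources for an instance, update usage

# Decorator form, the style used for resource-tracker methods.
@lockutils.synchronized('compute_resources')
def instance_claim():
    pass   # placeholder critical section

instance_claim()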
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2026.785029] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2026.785029] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4097bfa3-6ea4-4b07-a10a-8abfd315dc10 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.810459] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] fcc937e3-163d-432b-a131-a53c002e5e8d/fcc937e3-163d-432b-a131-a53c002e5e8d.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2026.813890] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca670fbb-5a0f-4901-b272-36c36e7eec54 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.833922] env[62684]: DEBUG oslo_vmware.api [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Waiting for the task: (returnval){ [ 2026.833922] env[62684]: value = "task-2053098" [ 2026.833922] env[62684]: _type = "Task" [ 2026.833922] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2026.843662] env[62684]: DEBUG oslo_vmware.api [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2053098, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.944319] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f29f8725-8e88-4b13-8058-9b8668709f01 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Releasing lock "refresh_cache-7b29207a-7fa8-4374-819e-c046b2014969" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2026.946977] env[62684]: DEBUG nova.compute.manager [None req-f29f8725-8e88-4b13-8058-9b8668709f01 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2026.947876] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05e24a1e-350c-413d-84a5-d9211cccc283 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.972189] env[62684]: DEBUG nova.network.neutron [req-9f87b6a4-f918-42f4-9777-cf12d2382b60 req-a3c1755d-4df2-4874-a4ee-1bb7f295a519 service nova] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Updated VIF entry in instance network info cache for port b509e8a7-cc45-43eb-abb5-1042ccb0b992. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2026.972265] env[62684]: DEBUG nova.network.neutron [req-9f87b6a4-f918-42f4-9777-cf12d2382b60 req-a3c1755d-4df2-4874-a4ee-1bb7f295a519 service nova] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Updating instance_info_cache with network_info: [{"id": "b509e8a7-cc45-43eb-abb5-1042ccb0b992", "address": "fa:16:3e:e8:03:40", "network": {"id": "aa52badb-0b73-48bc-afaa-5e06a97d5c7d", "bridge": "br-int", "label": "tempest-ServersTestJSON-556342067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c54f74085f343d2b790145b0d82a9f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb509e8a7-cc", "ovs_interfaceid": "b509e8a7-cc45-43eb-abb5-1042ccb0b992", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2027.246068] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053097, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2027.268654] env[62684]: DEBUG nova.compute.utils [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2027.272963] env[62684]: DEBUG nova.compute.manager [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2027.273152] env[62684]: DEBUG nova.network.neutron [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2027.329796] env[62684]: DEBUG nova.policy [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e1553097f3b048caa76bff4e5ecfbf5f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '398aed99e10d457e9cadda3239b27831', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2027.344927] env[62684]: DEBUG oslo_vmware.api [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2053098, 'name': ReconfigVM_Task, 'duration_secs': 0.275269} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2027.347270] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Reconfigured VM instance instance-00000044 to attach disk [datastore2] fcc937e3-163d-432b-a131-a53c002e5e8d/fcc937e3-163d-432b-a131-a53c002e5e8d.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2027.348088] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2e979914-d3d2-4c8c-bffb-338b0dd93e9f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.353962] env[62684]: DEBUG oslo_vmware.api [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Waiting for the task: (returnval){ [ 2027.353962] env[62684]: value = "task-2053099" [ 2027.353962] env[62684]: _type = "Task" [ 2027.353962] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2027.364372] env[62684]: DEBUG oslo_vmware.api [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2053099, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2027.476444] env[62684]: DEBUG oslo_concurrency.lockutils [req-9f87b6a4-f918-42f4-9777-cf12d2382b60 req-a3c1755d-4df2-4874-a4ee-1bb7f295a519 service nova] Releasing lock "refresh_cache-a1b7c2a7-f21d-41f4-9102-e656b8205e1f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2027.563879] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2505a6fe-638f-4e96-ae66-f5d210e7167e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.569429] env[62684]: DEBUG nova.network.neutron [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Successfully created port: a7af2687-2eb0-4853-8106-40d1a00c14e2 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2027.573692] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-399e4f0a-a358-4a4c-ade3-75c1f6bfa562 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.605737] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7a63602-90b4-4436-8fda-bb5c24ca97f4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.613687] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e1351b-d9f8-4c57-a6ae-e3595279c273 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.627361] env[62684]: DEBUG nova.compute.provider_tree [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2027.747618] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053097, 'name': CreateVM_Task, 'duration_secs': 0.664119} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2027.747987] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2027.748753] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2027.749054] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2027.749887] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2027.749887] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9345bc9-b5df-4572-8828-c1e3ad0f7fda {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.754802] env[62684]: DEBUG oslo_vmware.api [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2027.754802] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5202d17e-91c1-84ba-e11c-4961d62ddab0" [ 2027.754802] env[62684]: _type = "Task" [ 2027.754802] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2027.762597] env[62684]: DEBUG oslo_vmware.api [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5202d17e-91c1-84ba-e11c-4961d62ddab0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2027.773745] env[62684]: DEBUG nova.compute.manager [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2027.808484] env[62684]: DEBUG nova.network.neutron [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Successfully created port: e5c6aaf0-1581-4501-b063-e63ccb14fa99 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2027.864868] env[62684]: DEBUG oslo_vmware.api [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2053099, 'name': Rename_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2027.967147] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88c1f8f2-0d70-41ea-ba7d-d721e247a23f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.974798] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f29f8725-8e88-4b13-8058-9b8668709f01 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Doing hard reboot of VM {{(pid=62684) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 2027.975089] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-27257963-13bc-4962-8b05-6efae384a03a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.983148] env[62684]: DEBUG oslo_vmware.api [None req-f29f8725-8e88-4b13-8058-9b8668709f01 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Waiting for the task: (returnval){ [ 2027.983148] env[62684]: value = "task-2053100" [ 2027.983148] env[62684]: _type = "Task" [ 2027.983148] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2027.991165] env[62684]: DEBUG oslo_vmware.api [None req-f29f8725-8e88-4b13-8058-9b8668709f01 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2053100, 'name': ResetVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2028.131142] env[62684]: DEBUG nova.scheduler.client.report [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2028.266636] env[62684]: DEBUG oslo_vmware.api [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5202d17e-91c1-84ba-e11c-4961d62ddab0, 'name': SearchDatastore_Task, 'duration_secs': 0.068297} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2028.267154] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2028.267461] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2028.267825] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2028.268166] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2028.268388] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2028.268714] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-652cf0ad-df04-4884-8d21-904366a1dfba {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.277013] 
env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2028.277276] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2028.281392] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-732a259a-6b4a-42e9-a787-d77ad50e56e8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.289601] env[62684]: DEBUG oslo_vmware.api [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2028.289601] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5283a87f-2577-3b3e-90cb-7370bb9eb673" [ 2028.289601] env[62684]: _type = "Task" [ 2028.289601] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2028.297782] env[62684]: DEBUG oslo_vmware.api [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5283a87f-2577-3b3e-90cb-7370bb9eb673, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2028.368190] env[62684]: DEBUG oslo_vmware.api [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2053099, 'name': Rename_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2028.497921] env[62684]: DEBUG oslo_vmware.api [None req-f29f8725-8e88-4b13-8058-9b8668709f01 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2053100, 'name': ResetVM_Task, 'duration_secs': 0.131881} completed successfully. 
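The ds_util/vmops lines above are the image-cache path on datastore1: the per-image cache lock is taken, the devstack-image-cache_base folder is created if missing, and SearchDatastore_Task checks whether the 3931321c-... VMDK is already cached before it is copied into the instance directory. A schematic sketch of that decision follows; search_datastore() and copy_virtual_disk() are hypothetical stand-ins for the driver calls, and the Glance-download branch is left out.

# Schematic image-cache check mirroring the lock/mkdir/search/copy sequence
# above; search_datastore() and copy_virtual_disk() are stand-in stubs.
from oslo_concurrency import lockutils

CACHE = '[datastore1] devstack-image-cache_base'
IMAGE_ID = '3931321c-cb4c-4b87-8d3a-50e05ea01db2'

def search_datastore(path):
    return True    # stub: pretend SearchDatastore_Task found the cached VMDK

def copy_virtual_disk(src, dst):
    print('CopyVirtualDisk_Task: %s -> %s' % (src, dst))

def fetch_image_if_missing(instance_uuid):
    cached = '%s/%s/%s.vmdk' % (CACHE, IMAGE_ID, IMAGE_ID)
    with lockutils.lock(cached):               # per-image cache lock, as in the log
        if not search_datastore(cached):
            raise NotImplementedError('download from Glance not sketched here')
        dest = '[datastore1] %s/%s.vmdk' % (instance_uuid, instance_uuid)
        copy_virtual_disk(cached, dest)

fetch_image_if_missing('a1b7c2a7-f21d-41f4-9102-e656b8205e1f')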
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2028.498279] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f29f8725-8e88-4b13-8058-9b8668709f01 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Did hard reboot of VM {{(pid=62684) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 2028.498546] env[62684]: DEBUG nova.compute.manager [None req-f29f8725-8e88-4b13-8058-9b8668709f01 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2028.499516] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8438a9b-b619-4b57-b891-3aef515d9c50 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.636680] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.871s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2028.639089] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 6.121s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2028.659744] env[62684]: INFO nova.scheduler.client.report [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Deleted allocations for instance 06751c34-0724-44ba-a263-ad27fcf2920f [ 2028.786933] env[62684]: DEBUG nova.compute.manager [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2028.800030] env[62684]: DEBUG oslo_vmware.api [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5283a87f-2577-3b3e-90cb-7370bb9eb673, 'name': SearchDatastore_Task, 'duration_secs': 0.010827} completed successfully. 
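The "Deleted allocations for instance 06751c34-..." line above is the final step of terminating that instance: its resource allocations are removed from the Placement service so the VCPU/MEMORY_MB/DISK_GB usage is released. A hedged sketch of that call as a bare REST request; the endpoint URL and token are placeholders, and Nova itself goes through its scheduler report client with Keystone auth rather than raw requests.

# Hedged sketch: remove an instance's allocations in Placement. Endpoint and
# token are placeholders; DELETE /allocations/{consumer_uuid} is the
# documented Placement API route.
import requests

PLACEMENT = 'http://placement.example.test/placement'    # placeholder endpoint
TOKEN = 'REDACTED'                                        # placeholder Keystone token
consumer = '06751c34-0724-44ba-a263-ad27fcf2920f'         # instance UUID == consumer UUID

resp = requests.delete(
    '%s/allocations/%s' % (PLACEMENT, consumer),
    headers={'X-Auth-Token': TOKEN,
             'OpenStack-API-Version': 'placement 1.28'})
print(resp.status_code)   # 204 when the allocations were removed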
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2028.800866] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56ea6292-203c-47e4-b752-25b0f4f169e6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.806321] env[62684]: DEBUG oslo_vmware.api [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2028.806321] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5283afa7-800e-f22e-8bc9-7e71f124f637" [ 2028.806321] env[62684]: _type = "Task" [ 2028.806321] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2028.817279] env[62684]: DEBUG oslo_vmware.api [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5283afa7-800e-f22e-8bc9-7e71f124f637, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2028.819460] env[62684]: DEBUG nova.virt.hardware [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2028.820931] env[62684]: DEBUG nova.virt.hardware [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2028.820931] env[62684]: DEBUG nova.virt.hardware [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2028.820931] env[62684]: DEBUG nova.virt.hardware [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2028.820931] env[62684]: DEBUG nova.virt.hardware [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 2028.820931] env[62684]: DEBUG nova.virt.hardware [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2028.820931] env[62684]: DEBUG nova.virt.hardware [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2028.820931] env[62684]: DEBUG nova.virt.hardware [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2028.820931] env[62684]: DEBUG nova.virt.hardware [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2028.821237] env[62684]: DEBUG nova.virt.hardware [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2028.821237] env[62684]: DEBUG nova.virt.hardware [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2028.821954] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deff33fc-6727-4060-a438-6511b8cba9bf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.829275] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d5d860-f697-48e8-827a-2eb13f6a8f77 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.863943] env[62684]: DEBUG oslo_vmware.api [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2053099, 'name': Rename_Task, 'duration_secs': 1.099975} completed successfully. 
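The hardware.py lines above show topology selection for the m1.nano flavor: flavor and image give no limits or preferences (0:0:0, so the 65536 maximums apply), there is one vCPU, and the only factorization is sockets=1, cores=1, threads=1. A short illustrative enumeration of that search follows; it is not Nova's implementation, just the same arithmetic.

# Enumerate sockets*cores*threads splits for a vCPU count, as in
# "Build topologies for 1 vcpu(s) 1:1:1" -> "Possible topologies [(1,1,1)]".
import itertools

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    found = []
    for sockets, cores, threads in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            found.append((sockets, cores, threads))
    return found

print(possible_topologies(1))   # [(1, 1, 1)], matching the log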
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2028.864230] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2028.864478] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c27efe66-beda-4b02-bede-1cb4ebb54364 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.870707] env[62684]: DEBUG oslo_vmware.api [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Waiting for the task: (returnval){ [ 2028.870707] env[62684]: value = "task-2053101" [ 2028.870707] env[62684]: _type = "Task" [ 2028.870707] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2028.878756] env[62684]: DEBUG oslo_vmware.api [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2053101, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.011681] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f29f8725-8e88-4b13-8058-9b8668709f01 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Lock "7b29207a-7fa8-4374-819e-c046b2014969" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.019s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2029.167995] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2d7c7229-a6a8-4b01-b13e-508572408208 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "06751c34-0724-44ba-a263-ad27fcf2920f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.228s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2029.312276] env[62684]: DEBUG nova.compute.manager [req-9d5265fb-406d-4fe6-80fd-9acbfe5db54e req-4e74bf46-b046-4eb5-aa8f-592da39d8f94 service nova] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Received event network-vif-plugged-a7af2687-2eb0-4853-8106-40d1a00c14e2 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2029.312997] env[62684]: DEBUG oslo_concurrency.lockutils [req-9d5265fb-406d-4fe6-80fd-9acbfe5db54e req-4e74bf46-b046-4eb5-aa8f-592da39d8f94 service nova] Acquiring lock "380a804e-e1bf-4efa-8bb8-213733778927-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2029.312997] env[62684]: DEBUG oslo_concurrency.lockutils [req-9d5265fb-406d-4fe6-80fd-9acbfe5db54e req-4e74bf46-b046-4eb5-aa8f-592da39d8f94 service nova] Lock "380a804e-e1bf-4efa-8bb8-213733778927-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s 
{{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2029.313299] env[62684]: DEBUG oslo_concurrency.lockutils [req-9d5265fb-406d-4fe6-80fd-9acbfe5db54e req-4e74bf46-b046-4eb5-aa8f-592da39d8f94 service nova] Lock "380a804e-e1bf-4efa-8bb8-213733778927-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2029.313559] env[62684]: DEBUG nova.compute.manager [req-9d5265fb-406d-4fe6-80fd-9acbfe5db54e req-4e74bf46-b046-4eb5-aa8f-592da39d8f94 service nova] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] No waiting events found dispatching network-vif-plugged-a7af2687-2eb0-4853-8106-40d1a00c14e2 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2029.313873] env[62684]: WARNING nova.compute.manager [req-9d5265fb-406d-4fe6-80fd-9acbfe5db54e req-4e74bf46-b046-4eb5-aa8f-592da39d8f94 service nova] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Received unexpected event network-vif-plugged-a7af2687-2eb0-4853-8106-40d1a00c14e2 for instance with vm_state building and task_state spawning. [ 2029.327811] env[62684]: DEBUG oslo_vmware.api [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5283afa7-800e-f22e-8bc9-7e71f124f637, 'name': SearchDatastore_Task, 'duration_secs': 0.010797} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2029.328128] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2029.328403] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] a1b7c2a7-f21d-41f4-9102-e656b8205e1f/a1b7c2a7-f21d-41f4-9102-e656b8205e1f.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2029.328706] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-70f023bc-5b4b-4c6a-b0c1-d0e67125ebaa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.340296] env[62684]: DEBUG oslo_vmware.api [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2029.340296] env[62684]: value = "task-2053102" [ 2029.340296] env[62684]: _type = "Task" [ 2029.340296] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2029.342972] env[62684]: DEBUG nova.network.neutron [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Successfully updated port: a7af2687-2eb0-4853-8106-40d1a00c14e2 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2029.351127] env[62684]: DEBUG oslo_vmware.api [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053102, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.386561] env[62684]: DEBUG oslo_vmware.api [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2053101, 'name': PowerOnVM_Task, 'duration_secs': 0.449114} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2029.386824] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2029.387145] env[62684]: INFO nova.compute.manager [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Took 8.90 seconds to spawn the instance on the hypervisor. [ 2029.387424] env[62684]: DEBUG nova.compute.manager [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2029.388954] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d301bbb-04c7-422e-9b05-08ff0337222a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.681804] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance dcb0a5b2-379e-44ff-a9b0-be615943c94e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2029.681988] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 0676806b-c1f0-4c1a-a12d-add2edf1588f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2029.682129] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 025dfe36-1f14-4bda-84a0-d424364b745b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2029.682249] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2029.682389] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance b1f70e39-bf37-4fb8-b95b-653b59bec265 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2029.682577] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 26303c0e-be87-41ff-a15c-e92f91f8a05f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2029.682577] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance ca3d1a73-6f3b-4278-8fe7-03b66f407ba6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2029.682732] env[62684]: WARNING nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 274d214a-4b92-4900-a66c-54baea2a68f8 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 2029.682867] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance feca8680-4baa-4b2c-9875-69a88b351dc0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2029.682928] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 31419285-9fdf-4d37-94d7-d1b08c6b6b05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2029.683054] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance dab11b88-ac23-43f0-9203-024faf41e1f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2029.683187] env[62684]: WARNING nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 02dc8c41-5092-4f84-9722-37d4df3a459a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 2029.683302] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance b4cd871a-30ea-4b7a-98ad-00b8676dc2cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2029.683415] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 57537508-06e7-43a4-95c5-c4399b8bf93f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2029.685778] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 0156d807-1ab4-482f-91d1-172bf32bf23c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2029.685778] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 2baabe7a-ed33-4cef-9acc-a7b804610b0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2029.685778] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 81b7949d-be24-46c9-8dc8-c249b65bb039 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2029.685778] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 7b29207a-7fa8-4374-819e-c046b2014969 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2029.685778] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance fcc937e3-163d-432b-a131-a53c002e5e8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2029.685778] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance a1b7c2a7-f21d-41f4-9102-e656b8205e1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2029.685778] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 380a804e-e1bf-4efa-8bb8-213733778927 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2029.806488] env[62684]: DEBUG nova.compute.manager [req-f128856c-f165-4f89-9a01-c1f614ac6577 req-22d14418-0590-40c4-9996-3b064361d3a4 service nova] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Received event network-changed-c77f6606-e602-4667-992c-c6a0e052e01a {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2029.806749] env[62684]: DEBUG nova.compute.manager [req-f128856c-f165-4f89-9a01-c1f614ac6577 req-22d14418-0590-40c4-9996-3b064361d3a4 service nova] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Refreshing instance network info cache due to event network-changed-c77f6606-e602-4667-992c-c6a0e052e01a. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2029.807022] env[62684]: DEBUG oslo_concurrency.lockutils [req-f128856c-f165-4f89-9a01-c1f614ac6577 req-22d14418-0590-40c4-9996-3b064361d3a4 service nova] Acquiring lock "refresh_cache-7b29207a-7fa8-4374-819e-c046b2014969" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2029.807517] env[62684]: DEBUG oslo_concurrency.lockutils [req-f128856c-f165-4f89-9a01-c1f614ac6577 req-22d14418-0590-40c4-9996-3b064361d3a4 service nova] Acquired lock "refresh_cache-7b29207a-7fa8-4374-819e-c046b2014969" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2029.807726] env[62684]: DEBUG nova.network.neutron [req-f128856c-f165-4f89-9a01-c1f614ac6577 req-22d14418-0590-40c4-9996-3b064361d3a4 service nova] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Refreshing network info cache for port c77f6606-e602-4667-992c-c6a0e052e01a {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2029.858061] env[62684]: DEBUG oslo_vmware.api [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053102, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.907853] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "dcb0a5b2-379e-44ff-a9b0-be615943c94e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2029.907853] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "dcb0a5b2-379e-44ff-a9b0-be615943c94e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2029.907853] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "dcb0a5b2-379e-44ff-a9b0-be615943c94e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2029.907853] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "dcb0a5b2-379e-44ff-a9b0-be615943c94e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2029.908621] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "dcb0a5b2-379e-44ff-a9b0-be615943c94e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2029.915019] env[62684]: INFO nova.compute.manager [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Terminating instance [ 2029.917495] env[62684]: DEBUG nova.compute.manager [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2029.917652] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2029.919093] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20fc3670-cf23-4283-86e1-19084bdf0783 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.921635] env[62684]: INFO nova.compute.manager [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Took 31.75 seconds to build instance. [ 2029.927122] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2029.927454] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3f0173a6-3900-4ed9-8dd5-3375197d3961 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.946063] env[62684]: DEBUG oslo_vmware.api [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 2029.946063] env[62684]: value = "task-2053103" [ 2029.946063] env[62684]: _type = "Task" [ 2029.946063] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2029.957481] env[62684]: DEBUG oslo_vmware.api [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2053103, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2030.186806] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 41da0c18-dd9c-49bb-8b0d-a907575ee22e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2030.187155] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 19 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2030.187302] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4224MB phys_disk=200GB used_disk=20GB total_vcpus=48 used_vcpus=19 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2030.353453] env[62684]: DEBUG oslo_vmware.api [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053102, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.55384} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2030.354463] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] a1b7c2a7-f21d-41f4-9102-e656b8205e1f/a1b7c2a7-f21d-41f4-9102-e656b8205e1f.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2030.355324] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2030.355324] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-025dc682-92d1-4bb3-a28a-5b84e707d59c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.362708] env[62684]: DEBUG oslo_vmware.api [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2030.362708] env[62684]: value = "task-2053104" [ 2030.362708] env[62684]: _type = "Task" [ 2030.362708] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2030.372377] env[62684]: DEBUG oslo_vmware.api [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053104, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2030.423741] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5dd31484-e08b-4e7a-bb2c-460e9a0a4bdd tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Lock "fcc937e3-163d-432b-a131-a53c002e5e8d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.263s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2030.449018] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Acquiring lock "7b29207a-7fa8-4374-819e-c046b2014969" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2030.449018] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Lock "7b29207a-7fa8-4374-819e-c046b2014969" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2030.449018] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Acquiring lock "7b29207a-7fa8-4374-819e-c046b2014969-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2030.449018] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Lock "7b29207a-7fa8-4374-819e-c046b2014969-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2030.449018] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Lock "7b29207a-7fa8-4374-819e-c046b2014969-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2030.455795] env[62684]: INFO nova.compute.manager [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Terminating instance [ 2030.460064] env[62684]: DEBUG nova.compute.manager [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2030.460272] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2030.461241] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbebe40d-3d4e-45b6-a5fd-236f41e667df {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.466738] env[62684]: DEBUG oslo_vmware.api [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2053103, 'name': PowerOffVM_Task, 'duration_secs': 0.341119} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2030.467293] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2030.467508] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2030.467768] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6747c3e7-8551-449d-8781-4f9e1bd4e472 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.470607] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac0eb73d-7d08-49ae-b8b5-936e70a9bbe2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.475140] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2030.475665] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d4dddde0-1eae-433c-97e5-45b969f1f5b0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.479938] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acd2a93c-b518-4559-8ee9-85484d7f55dc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.483711] env[62684]: DEBUG oslo_vmware.api [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Waiting for the task: (returnval){ [ 2030.483711] env[62684]: value = "task-2053106" [ 2030.483711] env[62684]: 
_type = "Task" [ 2030.483711] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2030.514174] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-effc40a4-fc96-420c-9f93-0d20b3b7b359 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.519682] env[62684]: DEBUG oslo_vmware.api [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2053106, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2030.524856] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf265b75-3aab-4697-bb75-822252296325 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.537798] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2030.555782] env[62684]: DEBUG nova.network.neutron [req-f128856c-f165-4f89-9a01-c1f614ac6577 req-22d14418-0590-40c4-9996-3b064361d3a4 service nova] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Updated VIF entry in instance network info cache for port c77f6606-e602-4667-992c-c6a0e052e01a. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2030.556183] env[62684]: DEBUG nova.network.neutron [req-f128856c-f165-4f89-9a01-c1f614ac6577 req-22d14418-0590-40c4-9996-3b064361d3a4 service nova] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Updating instance_info_cache with network_info: [{"id": "c77f6606-e602-4667-992c-c6a0e052e01a", "address": "fa:16:3e:43:c7:f0", "network": {"id": "899020b7-a29e-4a35-bf3c-f9aebda1208d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1035902693-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "feab568b5c9e41bfa2ca824d44bcc4e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06eaa4c9-dbc2-4d38-a844-7bf76e7b5a64", "external-id": "nsx-vlan-transportzone-804", "segmentation_id": 804, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc77f6606-e6", "ovs_interfaceid": "c77f6606-e602-4667-992c-c6a0e052e01a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2030.702210] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Unregistered the VM 
{{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2030.702449] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2030.702716] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Deleting the datastore file [datastore1] dcb0a5b2-379e-44ff-a9b0-be615943c94e {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2030.703342] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ebd1e718-81e5-4273-a864-58489063c96a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.710018] env[62684]: DEBUG oslo_vmware.api [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for the task: (returnval){ [ 2030.710018] env[62684]: value = "task-2053107" [ 2030.710018] env[62684]: _type = "Task" [ 2030.710018] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2030.717808] env[62684]: DEBUG oslo_vmware.api [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2053107, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2030.873256] env[62684]: DEBUG oslo_vmware.api [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053104, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071002} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2030.873542] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2030.874403] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e75b758-239f-45f0-afad-32add16c0fb3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.897795] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] a1b7c2a7-f21d-41f4-9102-e656b8205e1f/a1b7c2a7-f21d-41f4-9102-e656b8205e1f.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2030.898106] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99020ddf-1edc-4563-8a83-91694c455712 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.916971] env[62684]: DEBUG oslo_vmware.api [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2030.916971] env[62684]: value = "task-2053108" [ 2030.916971] env[62684]: _type = "Task" [ 2030.916971] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2030.924743] env[62684]: DEBUG oslo_vmware.api [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053108, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2030.993659] env[62684]: DEBUG oslo_vmware.api [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2053106, 'name': PowerOffVM_Task, 'duration_secs': 0.296368} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2030.993948] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2030.994141] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2030.994407] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-39f35c38-5857-4ea7-942e-30dbaf682e4d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.041338] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2031.059382] env[62684]: DEBUG oslo_concurrency.lockutils [req-f128856c-f165-4f89-9a01-c1f614ac6577 req-22d14418-0590-40c4-9996-3b064361d3a4 service nova] Releasing lock "refresh_cache-7b29207a-7fa8-4374-819e-c046b2014969" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2031.175343] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2031.175619] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2031.175804] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Deleting the datastore file [datastore1] 7b29207a-7fa8-4374-819e-c046b2014969 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2031.176110] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ac588f8a-28bc-4eb4-a13a-a2f0dd916a38 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.185878] env[62684]: DEBUG oslo_vmware.api [None 
req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Waiting for the task: (returnval){ [ 2031.185878] env[62684]: value = "task-2053110" [ 2031.185878] env[62684]: _type = "Task" [ 2031.185878] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2031.194694] env[62684]: DEBUG oslo_vmware.api [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2053110, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.221203] env[62684]: DEBUG oslo_vmware.api [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Task: {'id': task-2053107, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192314} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2031.221571] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2031.221826] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2031.222134] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2031.222377] env[62684]: INFO nova.compute.manager [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Took 1.30 seconds to destroy the instance on the hypervisor. [ 2031.222710] env[62684]: DEBUG oslo.service.loopingcall [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2031.222908] env[62684]: DEBUG nova.compute.manager [-] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2031.223222] env[62684]: DEBUG nova.network.neutron [-] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2031.337149] env[62684]: DEBUG nova.compute.manager [req-5f6221dd-99ae-42f4-825c-2b10c1ac933a req-a59eda48-789e-4982-bcda-d40001ff9e69 service nova] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Received event network-changed-a7af2687-2eb0-4853-8106-40d1a00c14e2 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2031.337149] env[62684]: DEBUG nova.compute.manager [req-5f6221dd-99ae-42f4-825c-2b10c1ac933a req-a59eda48-789e-4982-bcda-d40001ff9e69 service nova] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Refreshing instance network info cache due to event network-changed-a7af2687-2eb0-4853-8106-40d1a00c14e2. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2031.337149] env[62684]: DEBUG oslo_concurrency.lockutils [req-5f6221dd-99ae-42f4-825c-2b10c1ac933a req-a59eda48-789e-4982-bcda-d40001ff9e69 service nova] Acquiring lock "refresh_cache-380a804e-e1bf-4efa-8bb8-213733778927" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2031.337149] env[62684]: DEBUG oslo_concurrency.lockutils [req-5f6221dd-99ae-42f4-825c-2b10c1ac933a req-a59eda48-789e-4982-bcda-d40001ff9e69 service nova] Acquired lock "refresh_cache-380a804e-e1bf-4efa-8bb8-213733778927" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2031.337951] env[62684]: DEBUG nova.network.neutron [req-5f6221dd-99ae-42f4-825c-2b10c1ac933a req-a59eda48-789e-4982-bcda-d40001ff9e69 service nova] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Refreshing network info cache for port a7af2687-2eb0-4853-8106-40d1a00c14e2 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2031.428866] env[62684]: DEBUG oslo_vmware.api [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053108, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.443610] env[62684]: DEBUG nova.network.neutron [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Successfully updated port: e5c6aaf0-1581-4501-b063-e63ccb14fa99 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2031.546258] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2031.546460] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.907s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2031.546833] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.840s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2031.547116] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2031.549249] env[62684]: DEBUG oslo_concurrency.lockutils [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.517s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2031.550726] env[62684]: INFO nova.compute.claims [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2031.573601] env[62684]: INFO nova.scheduler.client.report [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Deleted allocations for instance 02dc8c41-5092-4f84-9722-37d4df3a459a [ 2031.696349] env[62684]: DEBUG oslo_vmware.api [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2053110, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135363} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2031.696624] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2031.697721] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2031.697721] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2031.697721] env[62684]: INFO nova.compute.manager [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Took 1.24 seconds to destroy the instance on the hypervisor. [ 2031.697721] env[62684]: DEBUG oslo.service.loopingcall [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2031.697721] env[62684]: DEBUG nova.compute.manager [-] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2031.697957] env[62684]: DEBUG nova.network.neutron [-] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2031.836273] env[62684]: DEBUG nova.compute.manager [req-519403eb-a720-4aae-bad3-36062298979e req-cd368bed-80bd-4a6f-a43b-d0d485a5ee80 service nova] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Received event network-vif-deleted-617e9c54-b56e-4945-b890-de6be33b657b {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2031.836947] env[62684]: INFO nova.compute.manager [req-519403eb-a720-4aae-bad3-36062298979e req-cd368bed-80bd-4a6f-a43b-d0d485a5ee80 service nova] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Neutron deleted interface 617e9c54-b56e-4945-b890-de6be33b657b; detaching it from the instance and deleting it from the info cache [ 2031.837227] env[62684]: DEBUG nova.network.neutron [req-519403eb-a720-4aae-bad3-36062298979e req-cd368bed-80bd-4a6f-a43b-d0d485a5ee80 service nova] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2031.874974] env[62684]: DEBUG nova.network.neutron [req-5f6221dd-99ae-42f4-825c-2b10c1ac933a req-a59eda48-789e-4982-bcda-d40001ff9e69 service nova] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2031.928313] env[62684]: DEBUG oslo_vmware.api [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053108, 'name': ReconfigVM_Task, 'duration_secs': 0.617641} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2031.928727] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Reconfigured VM instance instance-00000045 to attach disk [datastore1] a1b7c2a7-f21d-41f4-9102-e656b8205e1f/a1b7c2a7-f21d-41f4-9102-e656b8205e1f.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2031.929445] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c92d9dda-a57f-40f1-8d1b-fcfa7188cb2d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.937865] env[62684]: DEBUG oslo_vmware.api [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2031.937865] env[62684]: value = "task-2053111" [ 2031.937865] env[62684]: _type = "Task" [ 2031.937865] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2031.948695] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Acquiring lock "refresh_cache-380a804e-e1bf-4efa-8bb8-213733778927" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2031.949185] env[62684]: DEBUG oslo_vmware.api [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053111, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.970208] env[62684]: DEBUG nova.network.neutron [-] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2031.980059] env[62684]: DEBUG nova.network.neutron [req-5f6221dd-99ae-42f4-825c-2b10c1ac933a req-a59eda48-789e-4982-bcda-d40001ff9e69 service nova] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2032.081610] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7549bac6-7f98-4938-a92d-29505b6f5ea1 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "02dc8c41-5092-4f84-9722-37d4df3a459a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.953s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2032.340729] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-56941083-432c-4d76-bf84-668a692fd9e0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.352457] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cde08dec-8e2c-46fe-bc3e-b403bbf5e914 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.387275] env[62684]: DEBUG nova.compute.manager [req-519403eb-a720-4aae-bad3-36062298979e req-cd368bed-80bd-4a6f-a43b-d0d485a5ee80 service nova] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Detach interface failed, port_id=617e9c54-b56e-4945-b890-de6be33b657b, reason: Instance dcb0a5b2-379e-44ff-a9b0-be615943c94e could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2032.447235] env[62684]: DEBUG oslo_vmware.api [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053111, 'name': Rename_Task, 'duration_secs': 0.168605} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2032.447561] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2032.447759] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ba7dfc5e-f877-49bf-a690-75c753a104f0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.454155] env[62684]: DEBUG oslo_vmware.api [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2032.454155] env[62684]: value = "task-2053112" [ 2032.454155] env[62684]: _type = "Task" [ 2032.454155] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2032.461733] env[62684]: DEBUG oslo_vmware.api [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053112, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.476597] env[62684]: INFO nova.compute.manager [-] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Took 1.25 seconds to deallocate network for instance. [ 2032.476892] env[62684]: DEBUG nova.network.neutron [-] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2032.483125] env[62684]: DEBUG oslo_concurrency.lockutils [req-5f6221dd-99ae-42f4-825c-2b10c1ac933a req-a59eda48-789e-4982-bcda-d40001ff9e69 service nova] Releasing lock "refresh_cache-380a804e-e1bf-4efa-8bb8-213733778927" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2032.483125] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Acquired lock "refresh_cache-380a804e-e1bf-4efa-8bb8-213733778927" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2032.483125] env[62684]: DEBUG nova.network.neutron [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2032.694924] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Acquiring lock "fcc937e3-163d-432b-a131-a53c002e5e8d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2032.695206] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 
tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Lock "fcc937e3-163d-432b-a131-a53c002e5e8d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2032.695414] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Acquiring lock "fcc937e3-163d-432b-a131-a53c002e5e8d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2032.695596] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Lock "fcc937e3-163d-432b-a131-a53c002e5e8d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2032.695764] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Lock "fcc937e3-163d-432b-a131-a53c002e5e8d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2032.699733] env[62684]: INFO nova.compute.manager [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Terminating instance [ 2032.701480] env[62684]: DEBUG nova.compute.manager [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2032.701677] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2032.702508] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35622a61-e810-412d-a3fd-8ada1049eebb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.711356] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2032.711639] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68c2f264-b69a-4af2-84d2-879e9e9e0c85 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.720287] env[62684]: DEBUG oslo_vmware.api [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Waiting for the task: (returnval){ [ 2032.720287] env[62684]: value = "task-2053113" [ 2032.720287] env[62684]: _type = "Task" [ 2032.720287] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2032.729080] env[62684]: DEBUG oslo_vmware.api [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2053113, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.841970] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a47495a-c600-4013-9327-7492834c1142 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.850394] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e382bdab-6df7-44a6-812c-5ace552d8c93 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.882412] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24cb480f-0c12-4fd5-9220-4ca956efa2c1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.891852] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ee7aef-0c92-4d34-a3b4-d7443c1e929b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.907356] env[62684]: DEBUG nova.compute.provider_tree [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2032.963714] env[62684]: DEBUG oslo_vmware.api [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053112, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.980853] env[62684]: INFO nova.compute.manager [-] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Took 1.28 seconds to deallocate network for instance. [ 2032.986224] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2033.019354] env[62684]: DEBUG nova.network.neutron [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2033.230943] env[62684]: DEBUG oslo_vmware.api [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2053113, 'name': PowerOffVM_Task, 'duration_secs': 0.234162} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.231279] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2033.231457] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2033.231788] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fe8baf23-a321-48b7-bc94-471c98d60dcd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.313553] env[62684]: DEBUG nova.network.neutron [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Updating instance_info_cache with network_info: [{"id": "a7af2687-2eb0-4853-8106-40d1a00c14e2", "address": "fa:16:3e:2b:5f:35", "network": {"id": "a73819e1-7ffc-488f-8fb0-615e55ac8750", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1758094564", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.25", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "398aed99e10d457e9cadda3239b27831", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7af2687-2e", "ovs_interfaceid": "a7af2687-2eb0-4853-8106-40d1a00c14e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e5c6aaf0-1581-4501-b063-e63ccb14fa99", "address": "fa:16:3e:e4:af:70", "network": {"id": "967e2053-e27f-447c-9360-dd8d9c4f2e41", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-773447645", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.144", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "398aed99e10d457e9cadda3239b27831", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "683a619f-b10d-41a3-8c03-4f69f6c9ce53", "external-id": "nsx-vlan-transportzone-898", "segmentation_id": 898, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5c6aaf0-15", "ovs_interfaceid": 
"e5c6aaf0-1581-4501-b063-e63ccb14fa99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2033.339014] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "0676806b-c1f0-4c1a-a12d-add2edf1588f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2033.339354] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "0676806b-c1f0-4c1a-a12d-add2edf1588f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2033.339508] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "0676806b-c1f0-4c1a-a12d-add2edf1588f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2033.339693] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "0676806b-c1f0-4c1a-a12d-add2edf1588f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2033.339866] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "0676806b-c1f0-4c1a-a12d-add2edf1588f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2033.341781] env[62684]: INFO nova.compute.manager [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Terminating instance [ 2033.343421] env[62684]: DEBUG nova.compute.manager [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2033.343618] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2033.344690] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a64e2ad3-21fa-446e-b8a6-7df8aca3eaab {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.352457] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2033.352644] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a00ad1bf-5a56-4c86-942c-56c0674a4078 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.359264] env[62684]: DEBUG oslo_vmware.api [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 2033.359264] env[62684]: value = "task-2053115" [ 2033.359264] env[62684]: _type = "Task" [ 2033.359264] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2033.365486] env[62684]: DEBUG nova.compute.manager [req-813984fe-facb-48ab-95fd-9a860244faa3 req-33279d5c-0d47-41b6-8f20-72412a809059 service nova] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Received event network-vif-plugged-e5c6aaf0-1581-4501-b063-e63ccb14fa99 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2033.365774] env[62684]: DEBUG oslo_concurrency.lockutils [req-813984fe-facb-48ab-95fd-9a860244faa3 req-33279d5c-0d47-41b6-8f20-72412a809059 service nova] Acquiring lock "380a804e-e1bf-4efa-8bb8-213733778927-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2033.366158] env[62684]: DEBUG oslo_concurrency.lockutils [req-813984fe-facb-48ab-95fd-9a860244faa3 req-33279d5c-0d47-41b6-8f20-72412a809059 service nova] Lock "380a804e-e1bf-4efa-8bb8-213733778927-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2033.366427] env[62684]: DEBUG oslo_concurrency.lockutils [req-813984fe-facb-48ab-95fd-9a860244faa3 req-33279d5c-0d47-41b6-8f20-72412a809059 service nova] Lock "380a804e-e1bf-4efa-8bb8-213733778927-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2033.366735] env[62684]: DEBUG nova.compute.manager [req-813984fe-facb-48ab-95fd-9a860244faa3 req-33279d5c-0d47-41b6-8f20-72412a809059 service nova] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] 
No waiting events found dispatching network-vif-plugged-e5c6aaf0-1581-4501-b063-e63ccb14fa99 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2033.367065] env[62684]: WARNING nova.compute.manager [req-813984fe-facb-48ab-95fd-9a860244faa3 req-33279d5c-0d47-41b6-8f20-72412a809059 service nova] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Received unexpected event network-vif-plugged-e5c6aaf0-1581-4501-b063-e63ccb14fa99 for instance with vm_state building and task_state spawning. [ 2033.367350] env[62684]: DEBUG nova.compute.manager [req-813984fe-facb-48ab-95fd-9a860244faa3 req-33279d5c-0d47-41b6-8f20-72412a809059 service nova] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Received event network-changed-e5c6aaf0-1581-4501-b063-e63ccb14fa99 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2033.367547] env[62684]: DEBUG nova.compute.manager [req-813984fe-facb-48ab-95fd-9a860244faa3 req-33279d5c-0d47-41b6-8f20-72412a809059 service nova] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Refreshing instance network info cache due to event network-changed-e5c6aaf0-1581-4501-b063-e63ccb14fa99. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2033.367728] env[62684]: DEBUG oslo_concurrency.lockutils [req-813984fe-facb-48ab-95fd-9a860244faa3 req-33279d5c-0d47-41b6-8f20-72412a809059 service nova] Acquiring lock "refresh_cache-380a804e-e1bf-4efa-8bb8-213733778927" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2033.374308] env[62684]: DEBUG oslo_vmware.api [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2053115, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.429724] env[62684]: ERROR nova.scheduler.client.report [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [req-ff7ffe93-dfc0-4d6e-9052-db256b3d4dcf] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ff7ffe93-dfc0-4d6e-9052-db256b3d4dcf"}]} [ 2033.446972] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2033.447268] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2033.447614] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Deleting the datastore file [datastore2] fcc937e3-163d-432b-a131-a53c002e5e8d {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2033.448607] env[62684]: DEBUG nova.scheduler.client.report [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2033.450682] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c8ebb03e-93ca-4340-9858-25abe46420f8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.459926] env[62684]: DEBUG oslo_vmware.api [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Waiting for the task: (returnval){ [ 2033.459926] env[62684]: value = "task-2053116" [ 2033.459926] env[62684]: _type = "Task" [ 2033.459926] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2033.467043] env[62684]: DEBUG oslo_vmware.api [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053112, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.467997] env[62684]: DEBUG nova.scheduler.client.report [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2033.468229] env[62684]: DEBUG nova.compute.provider_tree [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2033.474735] env[62684]: DEBUG oslo_vmware.api [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2053116, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.481267] env[62684]: DEBUG nova.scheduler.client.report [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2033.490263] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2033.501192] env[62684]: DEBUG nova.scheduler.client.report [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2033.758144] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1d7017c-0956-404b-a203-975205f9b525 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.765402] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66ed9958-733d-435a-946d-c7500bf587af {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.796028] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e6c3e9-5950-488c-a4e7-6099616fd651 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.803389] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf6d181-3805-4f54-ba0b-85fa543819f9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.818477] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Releasing lock "refresh_cache-380a804e-e1bf-4efa-8bb8-213733778927" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2033.818844] env[62684]: DEBUG nova.compute.manager [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Instance network_info: |[{"id": "a7af2687-2eb0-4853-8106-40d1a00c14e2", "address": "fa:16:3e:2b:5f:35", "network": {"id": "a73819e1-7ffc-488f-8fb0-615e55ac8750", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1758094564", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.25", "type": "fixed", 
"version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "398aed99e10d457e9cadda3239b27831", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7af2687-2e", "ovs_interfaceid": "a7af2687-2eb0-4853-8106-40d1a00c14e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e5c6aaf0-1581-4501-b063-e63ccb14fa99", "address": "fa:16:3e:e4:af:70", "network": {"id": "967e2053-e27f-447c-9360-dd8d9c4f2e41", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-773447645", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.144", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "398aed99e10d457e9cadda3239b27831", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "683a619f-b10d-41a3-8c03-4f69f6c9ce53", "external-id": "nsx-vlan-transportzone-898", "segmentation_id": 898, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5c6aaf0-15", "ovs_interfaceid": "e5c6aaf0-1581-4501-b063-e63ccb14fa99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2033.819341] env[62684]: DEBUG nova.compute.provider_tree [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2033.820654] env[62684]: DEBUG oslo_concurrency.lockutils [req-813984fe-facb-48ab-95fd-9a860244faa3 req-33279d5c-0d47-41b6-8f20-72412a809059 service nova] Acquired lock "refresh_cache-380a804e-e1bf-4efa-8bb8-213733778927" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2033.820849] env[62684]: DEBUG nova.network.neutron [req-813984fe-facb-48ab-95fd-9a860244faa3 req-33279d5c-0d47-41b6-8f20-72412a809059 service nova] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Refreshing network info cache for port e5c6aaf0-1581-4501-b063-e63ccb14fa99 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2033.822073] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb 
tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:5f:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2c7c1b46-cb81-45da-b5aa-7905d4da5854', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a7af2687-2eb0-4853-8106-40d1a00c14e2', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:af:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '683a619f-b10d-41a3-8c03-4f69f6c9ce53', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e5c6aaf0-1581-4501-b063-e63ccb14fa99', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2033.830847] env[62684]: DEBUG oslo.service.loopingcall [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2033.832183] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2033.832358] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b8bd5aff-00f4-4571-95ef-ea45487f2363 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.854388] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2033.854388] env[62684]: value = "task-2053117" [ 2033.854388] env[62684]: _type = "Task" [ 2033.854388] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2033.870383] env[62684]: DEBUG nova.compute.manager [req-da213b03-3eb6-4602-8ddc-36f9ecd3a971 req-e234f015-4b1e-4fc4-948a-ec35f2f04b6f service nova] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Received event network-vif-deleted-c77f6606-e602-4667-992c-c6a0e052e01a {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2033.870383] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053117, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.875175] env[62684]: DEBUG oslo_vmware.api [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2053115, 'name': PowerOffVM_Task, 'duration_secs': 0.200486} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.875643] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2033.875643] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2033.875833] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5bb529df-9261-4a0d-9d23-32234d114cd3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.967247] env[62684]: DEBUG oslo_vmware.api [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053112, 'name': PowerOnVM_Task, 'duration_secs': 1.138006} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.967882] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2033.968124] env[62684]: INFO nova.compute.manager [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Took 9.50 seconds to spawn the instance on the hypervisor. [ 2033.968325] env[62684]: DEBUG nova.compute.manager [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2033.969060] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ced74f-146e-42d2-8ee8-9b82ddaa9489 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.973888] env[62684]: DEBUG oslo_vmware.api [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Task: {'id': task-2053116, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156385} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.974427] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2033.974612] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2033.974804] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2033.975247] env[62684]: INFO nova.compute.manager [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Took 1.27 seconds to destroy the instance on the hypervisor. [ 2033.975247] env[62684]: DEBUG oslo.service.loopingcall [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2033.975414] env[62684]: DEBUG nova.compute.manager [-] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2033.975507] env[62684]: DEBUG nova.network.neutron [-] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2034.201239] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2034.201549] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2034.201727] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Deleting the datastore file [datastore2] 0676806b-c1f0-4c1a-a12d-add2edf1588f {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2034.201954] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e42602ca-d130-439f-b7e9-e66b7dbfd8f1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.209548] env[62684]: DEBUG oslo_vmware.api [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for the task: (returnval){ [ 2034.209548] env[62684]: value = "task-2053119" [ 2034.209548] env[62684]: _type = "Task" [ 2034.209548] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2034.217951] env[62684]: DEBUG oslo_vmware.api [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2053119, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.366492] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053117, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.372768] env[62684]: DEBUG nova.scheduler.client.report [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 99 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2034.373124] env[62684]: DEBUG nova.compute.provider_tree [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 99 to 100 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2034.373420] env[62684]: DEBUG nova.compute.provider_tree [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2034.490724] env[62684]: INFO nova.compute.manager [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Took 28.25 seconds to build instance. [ 2034.590642] env[62684]: DEBUG nova.network.neutron [req-813984fe-facb-48ab-95fd-9a860244faa3 req-33279d5c-0d47-41b6-8f20-72412a809059 service nova] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Updated VIF entry in instance network info cache for port e5c6aaf0-1581-4501-b063-e63ccb14fa99. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2034.590642] env[62684]: DEBUG nova.network.neutron [req-813984fe-facb-48ab-95fd-9a860244faa3 req-33279d5c-0d47-41b6-8f20-72412a809059 service nova] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Updating instance_info_cache with network_info: [{"id": "a7af2687-2eb0-4853-8106-40d1a00c14e2", "address": "fa:16:3e:2b:5f:35", "network": {"id": "a73819e1-7ffc-488f-8fb0-615e55ac8750", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1758094564", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.25", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "398aed99e10d457e9cadda3239b27831", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7af2687-2e", "ovs_interfaceid": "a7af2687-2eb0-4853-8106-40d1a00c14e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e5c6aaf0-1581-4501-b063-e63ccb14fa99", "address": "fa:16:3e:e4:af:70", "network": {"id": "967e2053-e27f-447c-9360-dd8d9c4f2e41", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-773447645", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.144", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "398aed99e10d457e9cadda3239b27831", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "683a619f-b10d-41a3-8c03-4f69f6c9ce53", "external-id": "nsx-vlan-transportzone-898", "segmentation_id": 898, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5c6aaf0-15", "ovs_interfaceid": "e5c6aaf0-1581-4501-b063-e63ccb14fa99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2034.719651] env[62684]: DEBUG oslo_vmware.api [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Task: {'id': task-2053119, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163269} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2034.719933] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2034.720150] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2034.720338] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2034.720521] env[62684]: INFO nova.compute.manager [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Took 1.38 seconds to destroy the instance on the hypervisor. [ 2034.720896] env[62684]: DEBUG oslo.service.loopingcall [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2034.721029] env[62684]: DEBUG nova.compute.manager [-] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2034.721126] env[62684]: DEBUG nova.network.neutron [-] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2034.730048] env[62684]: DEBUG nova.network.neutron [-] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2034.866567] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053117, 'name': CreateVM_Task, 'duration_secs': 0.928331} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2034.866826] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2034.867610] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2034.867825] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2034.868141] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2034.868416] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c061cdb-fceb-4b60-8fba-a14f2a7a0816 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.873474] env[62684]: DEBUG oslo_vmware.api [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Waiting for the task: (returnval){ [ 2034.873474] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5280e192-574e-c0fb-a1aa-077e3c99c91e" [ 2034.873474] env[62684]: _type = "Task" [ 2034.873474] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2034.881743] env[62684]: DEBUG oslo_concurrency.lockutils [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.332s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2034.882289] env[62684]: DEBUG nova.compute.manager [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2034.885504] env[62684]: DEBUG oslo_vmware.api [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5280e192-574e-c0fb-a1aa-077e3c99c91e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.885898] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2034.886123] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2034.888184] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.902s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2034.888411] env[62684]: DEBUG nova.objects.instance [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lazy-loading 'resources' on Instance uuid dcb0a5b2-379e-44ff-a9b0-be615943c94e {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2034.916600] env[62684]: INFO nova.scheduler.client.report [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Deleted allocations for instance 274d214a-4b92-4900-a66c-54baea2a68f8 [ 2034.992818] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4af6095c-69ad-4740-b7f1-1c2daac14dde tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "a1b7c2a7-f21d-41f4-9102-e656b8205e1f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.759s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2035.093113] env[62684]: DEBUG oslo_concurrency.lockutils [req-813984fe-facb-48ab-95fd-9a860244faa3 req-33279d5c-0d47-41b6-8f20-72412a809059 service nova] Releasing lock "refresh_cache-380a804e-e1bf-4efa-8bb8-213733778927" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2035.228606] env[62684]: DEBUG oslo_concurrency.lockutils [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "a1b7c2a7-f21d-41f4-9102-e656b8205e1f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2035.228910] env[62684]: DEBUG oslo_concurrency.lockutils [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "a1b7c2a7-f21d-41f4-9102-e656b8205e1f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2035.229159] env[62684]: DEBUG oslo_concurrency.lockutils [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "a1b7c2a7-f21d-41f4-9102-e656b8205e1f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2035.229710] env[62684]: DEBUG oslo_concurrency.lockutils [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "a1b7c2a7-f21d-41f4-9102-e656b8205e1f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2035.230083] env[62684]: DEBUG oslo_concurrency.lockutils [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "a1b7c2a7-f21d-41f4-9102-e656b8205e1f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2035.232095] env[62684]: INFO nova.compute.manager [-] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Took 1.26 seconds to deallocate network for instance. [ 2035.232603] env[62684]: INFO nova.compute.manager [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Terminating instance [ 2035.237957] env[62684]: DEBUG nova.compute.manager [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2035.238205] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2035.239046] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af377687-1640-4de5-b1bb-9085adbc0b0b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.247110] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2035.247348] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c79d5638-1a6c-48a4-9008-2ba4b06394bc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.255398] env[62684]: DEBUG oslo_vmware.api [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2035.255398] env[62684]: value = "task-2053120" [ 2035.255398] env[62684]: _type = "Task" [ 2035.255398] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.263513] env[62684]: DEBUG oslo_vmware.api [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053120, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.385525] env[62684]: DEBUG oslo_vmware.api [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5280e192-574e-c0fb-a1aa-077e3c99c91e, 'name': SearchDatastore_Task, 'duration_secs': 0.029563} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2035.387118] env[62684]: DEBUG nova.compute.utils [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2035.388710] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2035.389131] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2035.389524] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2035.389922] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2035.390263] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2035.390954] env[62684]: DEBUG nova.compute.manager [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2035.391249] env[62684]: DEBUG nova.network.neutron [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2035.395979] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3e4890c-59af-47ad-b312-fd4aaa408bb5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.401832] env[62684]: DEBUG nova.compute.manager [req-12fbe69b-b7a6-4869-8d93-dda4220bdef2 req-07824e44-ec67-4a0f-bf93-ef8a907f9820 service nova] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Received event network-vif-deleted-204e0bce-b0f4-4edd-b609-c528bf00f2fe {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2035.402527] env[62684]: DEBUG nova.compute.manager [req-12fbe69b-b7a6-4869-8d93-dda4220bdef2 req-07824e44-ec67-4a0f-bf93-ef8a907f9820 service nova] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Received event network-vif-deleted-10023d3d-f0cd-49c9-984f-fb3f2af83e3b {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2035.402675] env[62684]: INFO nova.compute.manager [req-12fbe69b-b7a6-4869-8d93-dda4220bdef2 req-07824e44-ec67-4a0f-bf93-ef8a907f9820 service nova] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Neutron deleted interface 10023d3d-f0cd-49c9-984f-fb3f2af83e3b; detaching it from the instance and deleting it from the info cache [ 2035.402863] env[62684]: DEBUG nova.network.neutron [req-12fbe69b-b7a6-4869-8d93-dda4220bdef2 req-07824e44-ec67-4a0f-bf93-ef8a907f9820 service nova] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2035.412669] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2035.413220] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2035.414926] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a74420e0-d447-4da6-847b-af9463ccb7ad {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.425089] env[62684]: DEBUG oslo_vmware.api [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Waiting for the task: (returnval){ [ 2035.425089] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52979c14-86d8-abad-d25c-b739e472a77c" [ 2035.425089] env[62684]: _type = "Task" [ 2035.425089] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.429026] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53a806c2-5cd6-414a-bf2e-3dfcce08f0ad tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "274d214a-4b92-4900-a66c-54baea2a68f8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.011s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2035.439574] env[62684]: DEBUG oslo_vmware.api [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52979c14-86d8-abad-d25c-b739e472a77c, 'name': SearchDatastore_Task, 'duration_secs': 0.009837} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2035.443829] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c473c8fb-c6ad-4d45-86de-7ff0bda392aa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.449634] env[62684]: DEBUG oslo_vmware.api [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Waiting for the task: (returnval){ [ 2035.449634] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e7f301-6555-1a1c-1e27-fbb4e78c2dd3" [ 2035.449634] env[62684]: _type = "Task" [ 2035.449634] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.458894] env[62684]: DEBUG oslo_vmware.api [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e7f301-6555-1a1c-1e27-fbb4e78c2dd3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.462525] env[62684]: DEBUG nova.policy [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0c9327f7394249948899bf76e1837d36', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7855def9d0aa49abb7003ee504b9ccaf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2035.467696] env[62684]: DEBUG nova.network.neutron [-] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2035.690620] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a64161c-c18d-4b6e-8a23-27807fb5b86c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.698646] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5beb4ab0-5d7f-42cc-81a3-2f73478222da {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.729695] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d7b0ccb-3bed-45fa-8782-8cc9ac6349c7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.732368] env[62684]: DEBUG nova.network.neutron [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Successfully created port: 77fad839-d56b-4f69-ae2c-c846fad13348 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2035.739585] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a8f0664-13a3-4b71-810b-ac863ea87f1a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.744164] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2035.753857] env[62684]: DEBUG nova.compute.provider_tree [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2035.764644] env[62684]: DEBUG oslo_vmware.api [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053120, 'name': 
PowerOffVM_Task, 'duration_secs': 0.183551} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2035.764906] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2035.765089] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2035.765327] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-30de60ad-bf9c-4be8-abc3-8a39e9178e36 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.891635] env[62684]: DEBUG nova.compute.manager [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2035.908221] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c68f7445-3103-4eb3-be7f-42b68b21cab5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.917471] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ec98641-fa9a-49c5-9433-c9a7541ee564 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.951606] env[62684]: DEBUG nova.compute.manager [req-12fbe69b-b7a6-4869-8d93-dda4220bdef2 req-07824e44-ec67-4a0f-bf93-ef8a907f9820 service nova] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Detach interface failed, port_id=10023d3d-f0cd-49c9-984f-fb3f2af83e3b, reason: Instance 0676806b-c1f0-4c1a-a12d-add2edf1588f could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2035.960289] env[62684]: DEBUG oslo_vmware.api [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e7f301-6555-1a1c-1e27-fbb4e78c2dd3, 'name': SearchDatastore_Task, 'duration_secs': 0.009855} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2035.960540] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2035.960832] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 380a804e-e1bf-4efa-8bb8-213733778927/380a804e-e1bf-4efa-8bb8-213733778927.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2035.961117] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-705cbdec-76f5-4f8d-8e35-d2ad6865f228 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.968812] env[62684]: DEBUG oslo_vmware.api [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Waiting for the task: (returnval){ [ 2035.968812] env[62684]: value = "task-2053122" [ 2035.968812] env[62684]: _type = "Task" [ 2035.968812] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.971748] env[62684]: INFO nova.compute.manager [-] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Took 1.25 seconds to deallocate network for instance. [ 2035.980041] env[62684]: DEBUG oslo_vmware.api [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2053122, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.124837] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2036.125072] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2036.125338] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Deleting the datastore file [datastore1] a1b7c2a7-f21d-41f4-9102-e656b8205e1f {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2036.125681] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eccc6093-0e53-4934-abe7-2b6ba50c08fb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.133891] env[62684]: DEBUG oslo_vmware.api [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2036.133891] env[62684]: value = "task-2053123" [ 2036.133891] env[62684]: _type = "Task" [ 2036.133891] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.143293] env[62684]: DEBUG oslo_vmware.api [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053123, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.256907] env[62684]: DEBUG nova.scheduler.client.report [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2036.480135] env[62684]: DEBUG oslo_vmware.api [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2053122, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.442341} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2036.480473] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 380a804e-e1bf-4efa-8bb8-213733778927/380a804e-e1bf-4efa-8bb8-213733778927.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2036.480676] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2036.481818] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2036.481897] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ceaeec7f-1e0e-418c-99bd-8d66bce46401 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.489069] env[62684]: DEBUG oslo_vmware.api [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Waiting for the task: (returnval){ [ 2036.489069] env[62684]: value = "task-2053124" [ 2036.489069] env[62684]: _type = "Task" [ 2036.489069] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.498140] env[62684]: DEBUG oslo_vmware.api [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2053124, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.646347] env[62684]: DEBUG oslo_vmware.api [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053123, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.347317} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2036.646709] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2036.646973] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2036.647197] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2036.647424] env[62684]: INFO nova.compute.manager [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Took 1.41 seconds to destroy the instance on the hypervisor. [ 2036.647713] env[62684]: DEBUG oslo.service.loopingcall [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2036.647920] env[62684]: DEBUG nova.compute.manager [-] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2036.648037] env[62684]: DEBUG nova.network.neutron [-] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2036.762273] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.874s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2036.764576] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.274s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2036.764845] env[62684]: DEBUG nova.objects.instance [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Lazy-loading 'resources' on Instance uuid 7b29207a-7fa8-4374-819e-c046b2014969 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2036.784554] env[62684]: INFO nova.scheduler.client.report [None 
req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Deleted allocations for instance dcb0a5b2-379e-44ff-a9b0-be615943c94e [ 2036.900998] env[62684]: DEBUG nova.compute.manager [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2036.928594] env[62684]: DEBUG nova.virt.hardware [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2036.929053] env[62684]: DEBUG nova.virt.hardware [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2036.929053] env[62684]: DEBUG nova.virt.hardware [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2036.929211] env[62684]: DEBUG nova.virt.hardware [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2036.929361] env[62684]: DEBUG nova.virt.hardware [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2036.929511] env[62684]: DEBUG nova.virt.hardware [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2036.929721] env[62684]: DEBUG nova.virt.hardware [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 
2036.929889] env[62684]: DEBUG nova.virt.hardware [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2036.930163] env[62684]: DEBUG nova.virt.hardware [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2036.930419] env[62684]: DEBUG nova.virt.hardware [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2036.930640] env[62684]: DEBUG nova.virt.hardware [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2036.931821] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a73c5451-7b7d-4d63-a8b1-27b9fd9ee610 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.941071] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ed5fc0-9f3c-410d-b9bc-9e0e8c2a8eef {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.945742] env[62684]: DEBUG nova.compute.manager [req-46eaae3d-8324-49f3-b812-043fcf02bce2 req-8faadfcf-954a-4f1a-afa0-20f234839049 service nova] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Received event network-vif-deleted-b509e8a7-cc45-43eb-abb5-1042ccb0b992 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2036.945899] env[62684]: INFO nova.compute.manager [req-46eaae3d-8324-49f3-b812-043fcf02bce2 req-8faadfcf-954a-4f1a-afa0-20f234839049 service nova] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Neutron deleted interface b509e8a7-cc45-43eb-abb5-1042ccb0b992; detaching it from the instance and deleting it from the info cache [ 2036.946089] env[62684]: DEBUG nova.network.neutron [req-46eaae3d-8324-49f3-b812-043fcf02bce2 req-8faadfcf-954a-4f1a-afa0-20f234839049 service nova] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2036.999153] env[62684]: DEBUG oslo_vmware.api [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2053124, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061252} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2037.000043] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2037.000223] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a5bc480-5530-46dd-b6b6-4f5cf449926a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.024225] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] 380a804e-e1bf-4efa-8bb8-213733778927/380a804e-e1bf-4efa-8bb8-213733778927.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2037.024869] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e5c3f51-97ae-4aaa-87b7-0c082d7d7ba1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.045861] env[62684]: DEBUG oslo_vmware.api [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Waiting for the task: (returnval){ [ 2037.045861] env[62684]: value = "task-2053125" [ 2037.045861] env[62684]: _type = "Task" [ 2037.045861] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2037.053852] env[62684]: DEBUG oslo_vmware.api [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2053125, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.225393] env[62684]: DEBUG nova.network.neutron [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Successfully updated port: 77fad839-d56b-4f69-ae2c-c846fad13348 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2037.292769] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bfa554ed-3864-4355-bce9-51144807ae94 tempest-ServersAdminTestJSON-492707250 tempest-ServersAdminTestJSON-492707250-project-member] Lock "dcb0a5b2-379e-44ff-a9b0-be615943c94e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.385s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2037.419712] env[62684]: DEBUG nova.network.neutron [-] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2037.433697] env[62684]: DEBUG nova.compute.manager [req-fb1ff5a0-e232-40b9-829e-d7655ec42603 req-f2982a1d-a061-451e-a4ea-613f792c90bd service nova] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Received event network-vif-plugged-77fad839-d56b-4f69-ae2c-c846fad13348 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2037.433936] env[62684]: DEBUG oslo_concurrency.lockutils [req-fb1ff5a0-e232-40b9-829e-d7655ec42603 req-f2982a1d-a061-451e-a4ea-613f792c90bd service nova] Acquiring lock "41da0c18-dd9c-49bb-8b0d-a907575ee22e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2037.436264] env[62684]: DEBUG oslo_concurrency.lockutils [req-fb1ff5a0-e232-40b9-829e-d7655ec42603 req-f2982a1d-a061-451e-a4ea-613f792c90bd service nova] Lock "41da0c18-dd9c-49bb-8b0d-a907575ee22e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2037.436630] env[62684]: DEBUG oslo_concurrency.lockutils [req-fb1ff5a0-e232-40b9-829e-d7655ec42603 req-f2982a1d-a061-451e-a4ea-613f792c90bd service nova] Lock "41da0c18-dd9c-49bb-8b0d-a907575ee22e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2037.436941] env[62684]: DEBUG nova.compute.manager [req-fb1ff5a0-e232-40b9-829e-d7655ec42603 req-f2982a1d-a061-451e-a4ea-613f792c90bd service nova] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] No waiting events found dispatching network-vif-plugged-77fad839-d56b-4f69-ae2c-c846fad13348 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2037.437153] env[62684]: WARNING nova.compute.manager [req-fb1ff5a0-e232-40b9-829e-d7655ec42603 req-f2982a1d-a061-451e-a4ea-613f792c90bd service nova] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Received unexpected event network-vif-plugged-77fad839-d56b-4f69-ae2c-c846fad13348 for instance with vm_state building and task_state spawning. 
[ 2037.437329] env[62684]: DEBUG nova.compute.manager [req-fb1ff5a0-e232-40b9-829e-d7655ec42603 req-f2982a1d-a061-451e-a4ea-613f792c90bd service nova] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Received event network-changed-77fad839-d56b-4f69-ae2c-c846fad13348 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2037.437492] env[62684]: DEBUG nova.compute.manager [req-fb1ff5a0-e232-40b9-829e-d7655ec42603 req-f2982a1d-a061-451e-a4ea-613f792c90bd service nova] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Refreshing instance network info cache due to event network-changed-77fad839-d56b-4f69-ae2c-c846fad13348. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2037.437689] env[62684]: DEBUG oslo_concurrency.lockutils [req-fb1ff5a0-e232-40b9-829e-d7655ec42603 req-f2982a1d-a061-451e-a4ea-613f792c90bd service nova] Acquiring lock "refresh_cache-41da0c18-dd9c-49bb-8b0d-a907575ee22e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2037.437913] env[62684]: DEBUG oslo_concurrency.lockutils [req-fb1ff5a0-e232-40b9-829e-d7655ec42603 req-f2982a1d-a061-451e-a4ea-613f792c90bd service nova] Acquired lock "refresh_cache-41da0c18-dd9c-49bb-8b0d-a907575ee22e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2037.438105] env[62684]: DEBUG nova.network.neutron [req-fb1ff5a0-e232-40b9-829e-d7655ec42603 req-f2982a1d-a061-451e-a4ea-613f792c90bd service nova] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Refreshing network info cache for port 77fad839-d56b-4f69-ae2c-c846fad13348 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2037.448736] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-049dffa8-7f58-4dd5-86de-7df1aa573232 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.462142] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c7416bc-48c0-46a8-b081-ea17a078f8d8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.497173] env[62684]: DEBUG nova.compute.manager [req-46eaae3d-8324-49f3-b812-043fcf02bce2 req-8faadfcf-954a-4f1a-afa0-20f234839049 service nova] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Detach interface failed, port_id=b509e8a7-cc45-43eb-abb5-1042ccb0b992, reason: Instance a1b7c2a7-f21d-41f4-9102-e656b8205e1f could not be found. 
{{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2037.526271] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c821a51a-8733-4568-bab7-e556fef4cc7f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.533887] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de7f7a1d-97f1-408c-8eb0-42deaaa5c204 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.567483] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc010ea3-e36c-44a2-a344-0ff78cf16731 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.577833] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81d2723d-0856-4f97-9baf-7b80f52eb20c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.581526] env[62684]: DEBUG oslo_vmware.api [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2053125, 'name': ReconfigVM_Task, 'duration_secs': 0.317372} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2037.581804] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Reconfigured VM instance instance-00000046 to attach disk [datastore1] 380a804e-e1bf-4efa-8bb8-213733778927/380a804e-e1bf-4efa-8bb8-213733778927.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2037.582736] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-64893519-ef8c-4acd-85e7-76753c297af5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.593849] env[62684]: DEBUG nova.compute.provider_tree [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2037.596465] env[62684]: DEBUG oslo_vmware.api [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Waiting for the task: (returnval){ [ 2037.596465] env[62684]: value = "task-2053126" [ 2037.596465] env[62684]: _type = "Task" [ 2037.596465] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2037.604138] env[62684]: DEBUG oslo_vmware.api [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2053126, 'name': Rename_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.728279] env[62684]: DEBUG oslo_concurrency.lockutils [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "refresh_cache-41da0c18-dd9c-49bb-8b0d-a907575ee22e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2037.925572] env[62684]: INFO nova.compute.manager [-] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Took 1.28 seconds to deallocate network for instance. [ 2037.992292] env[62684]: DEBUG nova.network.neutron [req-fb1ff5a0-e232-40b9-829e-d7655ec42603 req-f2982a1d-a061-451e-a4ea-613f792c90bd service nova] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2038.109436] env[62684]: DEBUG oslo_vmware.api [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2053126, 'name': Rename_Task, 'duration_secs': 0.13325} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2038.109724] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2038.109973] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c48a606b-1da0-4ed9-8bfe-55bc13c9cf78 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.116141] env[62684]: DEBUG oslo_vmware.api [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Waiting for the task: (returnval){ [ 2038.116141] env[62684]: value = "task-2053127" [ 2038.116141] env[62684]: _type = "Task" [ 2038.116141] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2038.127442] env[62684]: DEBUG oslo_vmware.api [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2053127, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2038.128309] env[62684]: DEBUG nova.scheduler.client.report [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 100 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2038.128555] env[62684]: DEBUG nova.compute.provider_tree [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 100 to 101 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2038.129124] env[62684]: DEBUG nova.compute.provider_tree [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2038.132759] env[62684]: DEBUG nova.network.neutron [req-fb1ff5a0-e232-40b9-829e-d7655ec42603 req-f2982a1d-a061-451e-a4ea-613f792c90bd service nova] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2038.433680] env[62684]: DEBUG oslo_concurrency.lockutils [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2038.629387] env[62684]: DEBUG oslo_vmware.api [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2053127, 'name': PowerOnVM_Task, 'duration_secs': 0.45874} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2038.629591] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2038.630464] env[62684]: INFO nova.compute.manager [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Took 9.84 seconds to spawn the instance on the hypervisor. [ 2038.630464] env[62684]: DEBUG nova.compute.manager [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2038.631874] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-440ea777-30ca-4b94-9865-99e6b9f9dfa1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.635142] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.871s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2038.637294] env[62684]: DEBUG oslo_concurrency.lockutils [req-fb1ff5a0-e232-40b9-829e-d7655ec42603 req-f2982a1d-a061-451e-a4ea-613f792c90bd service nova] Releasing lock "refresh_cache-41da0c18-dd9c-49bb-8b0d-a907575ee22e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2038.637895] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.894s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2038.638745] env[62684]: DEBUG nova.objects.instance [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Lazy-loading 'resources' on Instance uuid fcc937e3-163d-432b-a131-a53c002e5e8d {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2038.639885] env[62684]: DEBUG oslo_concurrency.lockutils [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquired lock "refresh_cache-41da0c18-dd9c-49bb-8b0d-a907575ee22e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2038.640095] env[62684]: DEBUG nova.network.neutron [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Building network info cache for instance 
{{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2038.663485] env[62684]: INFO nova.scheduler.client.report [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Deleted allocations for instance 7b29207a-7fa8-4374-819e-c046b2014969 [ 2039.160601] env[62684]: INFO nova.compute.manager [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Took 19.75 seconds to build instance. [ 2039.174991] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0e65816a-b593-4142-ba71-bc3bcb9b7142 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Lock "7b29207a-7fa8-4374-819e-c046b2014969" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.727s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2039.203061] env[62684]: DEBUG nova.network.neutron [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2039.437979] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b1005c0-ab6c-48a1-826e-d6f49f0823f6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.450142] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73db3e1c-0495-4162-b67f-8d54d4ad89ba {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.455281] env[62684]: DEBUG nova.network.neutron [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Updating instance_info_cache with network_info: [{"id": "77fad839-d56b-4f69-ae2c-c846fad13348", "address": "fa:16:3e:20:b6:7c", "network": {"id": "2fa98fa4-ff7c-44e6-add0-693f55fd4b03", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2019954029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7855def9d0aa49abb7003ee504b9ccaf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77fad839-d5", "ovs_interfaceid": "77fad839-d56b-4f69-ae2c-c846fad13348", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 2039.491926] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20c8ed71-72ff-4108-a6a6-11f7cdc48373 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.505534] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc345ea1-a0fd-4aff-beb1-41793469235b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.520241] env[62684]: DEBUG nova.compute.provider_tree [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2039.628118] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Acquiring lock "380a804e-e1bf-4efa-8bb8-213733778927" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2039.664668] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8fbed600-1798-4f3d-9ea4-1e0ffaf6dfbb tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Lock "380a804e-e1bf-4efa-8bb8-213733778927" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.262s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2039.664991] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Lock "380a804e-e1bf-4efa-8bb8-213733778927" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.037s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2039.665234] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Acquiring lock "380a804e-e1bf-4efa-8bb8-213733778927-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2039.665445] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Lock "380a804e-e1bf-4efa-8bb8-213733778927-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2039.665620] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Lock "380a804e-e1bf-4efa-8bb8-213733778927-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2039.668449] env[62684]: INFO nova.compute.manager [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Terminating instance [ 2039.671121] env[62684]: DEBUG nova.compute.manager [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2039.671332] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2039.672190] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a08955a7-25cd-4f09-86dd-cde0dfe169e4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.681385] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2039.681665] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-29b819e9-642e-4be8-8b98-25043a805dca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.687555] env[62684]: DEBUG oslo_vmware.api [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Waiting for the task: (returnval){ [ 2039.687555] env[62684]: value = "task-2053128" [ 2039.687555] env[62684]: _type = "Task" [ 2039.687555] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.695534] env[62684]: DEBUG oslo_vmware.api [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2053128, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.958693] env[62684]: DEBUG oslo_concurrency.lockutils [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Releasing lock "refresh_cache-41da0c18-dd9c-49bb-8b0d-a907575ee22e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2039.959094] env[62684]: DEBUG nova.compute.manager [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Instance network_info: |[{"id": "77fad839-d56b-4f69-ae2c-c846fad13348", "address": "fa:16:3e:20:b6:7c", "network": {"id": "2fa98fa4-ff7c-44e6-add0-693f55fd4b03", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2019954029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7855def9d0aa49abb7003ee504b9ccaf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77fad839-d5", "ovs_interfaceid": "77fad839-d56b-4f69-ae2c-c846fad13348", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2039.959530] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:20:b6:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '77fad839-d56b-4f69-ae2c-c846fad13348', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2039.968411] env[62684]: DEBUG oslo.service.loopingcall [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2039.968683] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2039.968884] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-732c60e4-cd6d-4fb1-a387-92e1c9764620 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.988335] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2039.988335] env[62684]: value = "task-2053129" [ 2039.988335] env[62684]: _type = "Task" [ 2039.988335] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.997725] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053129, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.022944] env[62684]: DEBUG nova.scheduler.client.report [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2040.201178] env[62684]: DEBUG oslo_vmware.api [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2053128, 'name': PowerOffVM_Task, 'duration_secs': 0.178846} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.201499] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2040.201916] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2040.201916] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7fae383a-9792-4535-8a15-d8ff6e581dda {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.295395] env[62684]: DEBUG oslo_concurrency.lockutils [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Acquiring lock "dab11b88-ac23-43f0-9203-024faf41e1f5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2040.295695] env[62684]: DEBUG oslo_concurrency.lockutils [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Lock "dab11b88-ac23-43f0-9203-024faf41e1f5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2040.296127] env[62684]: DEBUG oslo_concurrency.lockutils [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Acquiring lock "dab11b88-ac23-43f0-9203-024faf41e1f5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2040.296371] env[62684]: DEBUG oslo_concurrency.lockutils [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Lock "dab11b88-ac23-43f0-9203-024faf41e1f5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2040.296568] env[62684]: DEBUG oslo_concurrency.lockutils [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Lock "dab11b88-ac23-43f0-9203-024faf41e1f5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2040.299075] env[62684]: INFO nova.compute.manager [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 
tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Terminating instance [ 2040.300928] env[62684]: DEBUG nova.compute.manager [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2040.301138] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2040.302245] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c82206d6-5522-4109-a86b-e771f6870e23 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.310479] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2040.310730] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e3a530e4-caed-4a0a-9762-199700eb61ac {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.317272] env[62684]: DEBUG oslo_vmware.api [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Waiting for the task: (returnval){ [ 2040.317272] env[62684]: value = "task-2053131" [ 2040.317272] env[62684]: _type = "Task" [ 2040.317272] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.325452] env[62684]: DEBUG oslo_vmware.api [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2053131, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.500423] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053129, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.529775] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.892s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2040.532327] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.051s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2040.534603] env[62684]: DEBUG nova.objects.instance [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lazy-loading 'resources' on Instance uuid 0676806b-c1f0-4c1a-a12d-add2edf1588f {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2040.566478] env[62684]: INFO nova.scheduler.client.report [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Deleted allocations for instance fcc937e3-163d-432b-a131-a53c002e5e8d [ 2040.751318] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2040.751562] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2040.751753] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Deleting the datastore file [datastore1] 380a804e-e1bf-4efa-8bb8-213733778927 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2040.752100] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-76557b99-ddaa-433a-906d-72262f486fd3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.758278] env[62684]: DEBUG oslo_vmware.api [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Waiting for the task: (returnval){ [ 2040.758278] env[62684]: value = "task-2053132" [ 2040.758278] env[62684]: _type = "Task" [ 2040.758278] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.766138] env[62684]: DEBUG oslo_vmware.api [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2053132, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.828348] env[62684]: DEBUG oslo_vmware.api [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2053131, 'name': PowerOffVM_Task, 'duration_secs': 0.208056} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.828622] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2040.828798] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2040.829095] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-44b60da9-be71-4676-98fb-38caa4615735 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.959267] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2040.959502] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2040.959697] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Deleting the datastore file [datastore2] dab11b88-ac23-43f0-9203-024faf41e1f5 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2040.959999] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fe4cf1ee-f3f6-42a0-9936-68f3869fc34e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.966734] env[62684]: DEBUG oslo_vmware.api [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Waiting for the task: (returnval){ [ 2040.966734] env[62684]: value = "task-2053134" [ 2040.966734] env[62684]: _type 
= "Task" [ 2040.966734] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.974479] env[62684]: DEBUG oslo_vmware.api [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2053134, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.998734] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053129, 'name': CreateVM_Task, 'duration_secs': 0.540584} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.998912] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2040.999644] env[62684]: DEBUG oslo_concurrency.lockutils [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2040.999816] env[62684]: DEBUG oslo_concurrency.lockutils [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2041.000201] env[62684]: DEBUG oslo_concurrency.lockutils [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2041.000493] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c85afb56-64d7-4387-9f10-2fe6d04de0a5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.005154] env[62684]: DEBUG oslo_vmware.api [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 2041.005154] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b673b3-fc81-b39c-99eb-864256eea98f" [ 2041.005154] env[62684]: _type = "Task" [ 2041.005154] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.013242] env[62684]: DEBUG oslo_vmware.api [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b673b3-fc81-b39c-99eb-864256eea98f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.075226] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b5219080-cfc7-4054-885a-5ec15cf47ca5 tempest-VolumesAdminNegativeTest-852175326 tempest-VolumesAdminNegativeTest-852175326-project-member] Lock "fcc937e3-163d-432b-a131-a53c002e5e8d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.380s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2041.268362] env[62684]: DEBUG oslo_vmware.api [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Task: {'id': task-2053132, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.237618} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.270901] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2041.271124] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2041.271313] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2041.271494] env[62684]: INFO nova.compute.manager [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Took 1.60 seconds to destroy the instance on the hypervisor. [ 2041.271743] env[62684]: DEBUG oslo.service.loopingcall [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2041.272157] env[62684]: DEBUG nova.compute.manager [-] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2041.272264] env[62684]: DEBUG nova.network.neutron [-] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2041.300906] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b256ea34-0a65-40b4-8a34-b0dafdfa3bde {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.308674] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-180cafc4-817b-4e6b-b715-c01d74574cc8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.344506] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35417c61-207c-4e2c-af13-60a290a72184 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.352988] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d44d8d7-3e02-4e88-9acd-ad64fb51b8ed {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.368267] env[62684]: DEBUG nova.compute.provider_tree [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2041.477535] env[62684]: DEBUG oslo_vmware.api [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Task: {'id': task-2053134, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165641} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.477817] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2041.478063] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2041.478260] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2041.478442] env[62684]: INFO nova.compute.manager [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Took 1.18 seconds to destroy the instance on the hypervisor. [ 2041.478686] env[62684]: DEBUG oslo.service.loopingcall [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2041.478884] env[62684]: DEBUG nova.compute.manager [-] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2041.478984] env[62684]: DEBUG nova.network.neutron [-] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2041.517477] env[62684]: DEBUG oslo_vmware.api [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b673b3-fc81-b39c-99eb-864256eea98f, 'name': SearchDatastore_Task, 'duration_secs': 0.009855} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.517802] env[62684]: DEBUG oslo_concurrency.lockutils [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2041.518050] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2041.518289] env[62684]: DEBUG oslo_concurrency.lockutils [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2041.518446] env[62684]: DEBUG oslo_concurrency.lockutils [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2041.518664] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2041.518947] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-70e32e0b-c72c-407b-89c3-c4cd65f4bba9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.532262] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2041.532262] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2041.532262] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bed86801-eb08-4d8b-b57f-2bc5da523a46 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.536365] env[62684]: DEBUG oslo_vmware.api [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 2041.536365] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f89a12-9d56-9302-208b-c86e53430fb5" [ 2041.536365] env[62684]: _type = "Task" [ 2041.536365] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.544645] env[62684]: DEBUG oslo_vmware.api [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f89a12-9d56-9302-208b-c86e53430fb5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.694095] env[62684]: DEBUG nova.compute.manager [req-ddba7654-88d9-432a-82b5-db5719daa14d req-d33b7378-7b24-4b06-9b72-25c1d6ebfaa3 service nova] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Received event network-vif-deleted-e5c6aaf0-1581-4501-b063-e63ccb14fa99 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2041.694095] env[62684]: INFO nova.compute.manager [req-ddba7654-88d9-432a-82b5-db5719daa14d req-d33b7378-7b24-4b06-9b72-25c1d6ebfaa3 service nova] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Neutron deleted interface e5c6aaf0-1581-4501-b063-e63ccb14fa99; detaching it from the instance and deleting it from the info cache [ 2041.694095] env[62684]: DEBUG nova.network.neutron [req-ddba7654-88d9-432a-82b5-db5719daa14d req-d33b7378-7b24-4b06-9b72-25c1d6ebfaa3 service nova] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Updating instance_info_cache with network_info: [{"id": "a7af2687-2eb0-4853-8106-40d1a00c14e2", "address": "fa:16:3e:2b:5f:35", "network": {"id": "a73819e1-7ffc-488f-8fb0-615e55ac8750", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1758094564", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.25", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "398aed99e10d457e9cadda3239b27831", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7af2687-2e", "ovs_interfaceid": "a7af2687-2eb0-4853-8106-40d1a00c14e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2041.857167] env[62684]: DEBUG 
nova.compute.manager [req-61512e3b-8161-4435-acc0-e6ea45f8fa46 req-a8e6f2cd-c3d1-4d33-a9a0-4e0f6b775dda service nova] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Received event network-vif-deleted-6da08bf6-6c5b-41a3-90e2-d17b27a734e4 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2041.857380] env[62684]: INFO nova.compute.manager [req-61512e3b-8161-4435-acc0-e6ea45f8fa46 req-a8e6f2cd-c3d1-4d33-a9a0-4e0f6b775dda service nova] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Neutron deleted interface 6da08bf6-6c5b-41a3-90e2-d17b27a734e4; detaching it from the instance and deleting it from the info cache [ 2041.857559] env[62684]: DEBUG nova.network.neutron [req-61512e3b-8161-4435-acc0-e6ea45f8fa46 req-a8e6f2cd-c3d1-4d33-a9a0-4e0f6b775dda service nova] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2041.872383] env[62684]: DEBUG nova.scheduler.client.report [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2042.051019] env[62684]: DEBUG oslo_vmware.api [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f89a12-9d56-9302-208b-c86e53430fb5, 'name': SearchDatastore_Task, 'duration_secs': 0.036028} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2042.051019] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0d2413d-610f-4529-9b8e-80d780700441 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.056885] env[62684]: DEBUG oslo_vmware.api [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 2042.056885] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521f62ff-b4c9-ead7-50bf-9a137be10696" [ 2042.056885] env[62684]: _type = "Task" [ 2042.056885] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2042.066058] env[62684]: DEBUG oslo_vmware.api [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521f62ff-b4c9-ead7-50bf-9a137be10696, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2042.201020] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9f605fc1-1906-4dea-9a1d-6ba1d5cd0e0e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.209371] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31a4c567-0e01-46fa-a4d1-8c670a28ffc9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.243852] env[62684]: DEBUG nova.compute.manager [req-ddba7654-88d9-432a-82b5-db5719daa14d req-d33b7378-7b24-4b06-9b72-25c1d6ebfaa3 service nova] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Detach interface failed, port_id=e5c6aaf0-1581-4501-b063-e63ccb14fa99, reason: Instance 380a804e-e1bf-4efa-8bb8-213733778927 could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2042.325972] env[62684]: DEBUG nova.network.neutron [-] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2042.351783] env[62684]: DEBUG nova.network.neutron [-] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2042.361209] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d81723c1-ba9c-46c5-8e40-2d662706fa49 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.373149] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ff7932-a7a7-4952-81a5-726a7e3be489 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.387423] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.855s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2042.390222] env[62684]: DEBUG oslo_concurrency.lockutils [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.957s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2042.390461] env[62684]: DEBUG nova.objects.instance [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lazy-loading 'resources' on Instance uuid a1b7c2a7-f21d-41f4-9102-e656b8205e1f {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2042.417424] env[62684]: DEBUG nova.compute.manager [req-61512e3b-8161-4435-acc0-e6ea45f8fa46 req-a8e6f2cd-c3d1-4d33-a9a0-4e0f6b775dda service nova] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Detach interface failed, 
port_id=6da08bf6-6c5b-41a3-90e2-d17b27a734e4, reason: Instance dab11b88-ac23-43f0-9203-024faf41e1f5 could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2042.419323] env[62684]: INFO nova.scheduler.client.report [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Deleted allocations for instance 0676806b-c1f0-4c1a-a12d-add2edf1588f [ 2042.567944] env[62684]: DEBUG oslo_vmware.api [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521f62ff-b4c9-ead7-50bf-9a137be10696, 'name': SearchDatastore_Task, 'duration_secs': 0.010493} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2042.568289] env[62684]: DEBUG oslo_concurrency.lockutils [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2042.568559] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 41da0c18-dd9c-49bb-8b0d-a907575ee22e/41da0c18-dd9c-49bb-8b0d-a907575ee22e.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2042.568826] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-314d2c45-d80d-4bd1-aa15-3c9b9ae726a9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.575725] env[62684]: DEBUG oslo_vmware.api [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 2042.575725] env[62684]: value = "task-2053135" [ 2042.575725] env[62684]: _type = "Task" [ 2042.575725] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2042.583277] env[62684]: DEBUG oslo_vmware.api [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053135, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2042.829881] env[62684]: INFO nova.compute.manager [-] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Took 1.35 seconds to deallocate network for instance. [ 2042.854229] env[62684]: INFO nova.compute.manager [-] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Took 1.58 seconds to deallocate network for instance. 
[ 2042.928288] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0669d8eb-4c91-4780-aa5f-1f6b840f08e5 tempest-MigrationsAdminTest-991359366 tempest-MigrationsAdminTest-991359366-project-member] Lock "0676806b-c1f0-4c1a-a12d-add2edf1588f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.589s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2043.093729] env[62684]: DEBUG oslo_vmware.api [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053135, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.466908} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2043.094420] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 41da0c18-dd9c-49bb-8b0d-a907575ee22e/41da0c18-dd9c-49bb-8b0d-a907575ee22e.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2043.094786] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2043.095184] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0b8a5b00-e2b5-4c8e-a518-72885f71fd34 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.104948] env[62684]: DEBUG oslo_vmware.api [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 2043.104948] env[62684]: value = "task-2053136" [ 2043.104948] env[62684]: _type = "Task" [ 2043.104948] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2043.119720] env[62684]: DEBUG oslo_vmware.api [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053136, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2043.216100] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f3c57b-7adc-450e-ad42-dc41a8453f07 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.227344] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76d80224-7d99-4943-81a0-fa7ecd3d4075 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.258631] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e7f59a-ce6b-4bc5-bcc9-8a176e6babb0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.266884] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbbf92c1-1558-4ead-846c-cd8f448d2791 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.282773] env[62684]: DEBUG nova.compute.provider_tree [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2043.337041] env[62684]: DEBUG oslo_concurrency.lockutils [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2043.364596] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2043.618207] env[62684]: DEBUG oslo_vmware.api [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053136, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061531} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2043.618542] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2043.619379] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f5c73c6-c1ae-4a28-bc92-de0e160845f7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.641504] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] 41da0c18-dd9c-49bb-8b0d-a907575ee22e/41da0c18-dd9c-49bb-8b0d-a907575ee22e.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2043.641865] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9fbe7da-c356-4d5d-8be2-775cdcb63e49 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.661684] env[62684]: DEBUG oslo_vmware.api [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 2043.661684] env[62684]: value = "task-2053137" [ 2043.661684] env[62684]: _type = "Task" [ 2043.661684] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2043.669642] env[62684]: DEBUG oslo_vmware.api [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053137, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2043.792726] env[62684]: DEBUG nova.compute.manager [req-03c8780a-8adc-40f0-a6f9-f295e61b4835 req-a14da5e8-6227-433e-9f73-4bb701779d54 service nova] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Received event network-vif-deleted-a7af2687-2eb0-4853-8106-40d1a00c14e2 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2043.832711] env[62684]: DEBUG nova.scheduler.client.report [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 101 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2043.833151] env[62684]: DEBUG nova.compute.provider_tree [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 101 to 102 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2043.833409] env[62684]: DEBUG nova.compute.provider_tree [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2044.173457] env[62684]: DEBUG oslo_vmware.api [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053137, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2044.340162] env[62684]: DEBUG oslo_concurrency.lockutils [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.949s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2044.343030] env[62684]: DEBUG oslo_concurrency.lockutils [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.006s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2044.343451] env[62684]: DEBUG nova.objects.instance [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Lazy-loading 'resources' on Instance uuid dab11b88-ac23-43f0-9203-024faf41e1f5 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2044.380655] env[62684]: INFO nova.scheduler.client.report [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Deleted allocations for instance a1b7c2a7-f21d-41f4-9102-e656b8205e1f [ 2044.423025] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fafe80ca-2d84-44c1-9b85-ae1abb7c01f8 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "025dfe36-1f14-4bda-84a0-d424364b745b" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2044.423517] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fafe80ca-2d84-44c1-9b85-ae1abb7c01f8 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "025dfe36-1f14-4bda-84a0-d424364b745b" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2044.674885] env[62684]: DEBUG oslo_vmware.api [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053137, 'name': ReconfigVM_Task, 'duration_secs': 0.762505} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2044.674885] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Reconfigured VM instance instance-00000047 to attach disk [datastore1] 41da0c18-dd9c-49bb-8b0d-a907575ee22e/41da0c18-dd9c-49bb-8b0d-a907575ee22e.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2044.674885] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c7bf8dc4-4428-4d28-a4d4-e21849e067da {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.680413] env[62684]: DEBUG oslo_vmware.api [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 2044.680413] env[62684]: value = "task-2053138" [ 2044.680413] env[62684]: _type = "Task" [ 2044.680413] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2044.688644] env[62684]: DEBUG oslo_vmware.api [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053138, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2044.892639] env[62684]: DEBUG oslo_concurrency.lockutils [None req-020c6c93-236b-45a3-9109-e23fbaf1dfa5 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "a1b7c2a7-f21d-41f4-9102-e656b8205e1f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.663s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2044.926564] env[62684]: DEBUG nova.compute.utils [None req-fafe80ca-2d84-44c1-9b85-ae1abb7c01f8 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2045.080805] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb34fee9-e4e6-438a-88eb-06d50c2b52b1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.088974] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-905abb97-51a1-4cc2-9ed5-462628518d2e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.120845] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f7f4861-adc2-4303-8d43-bc0e462d3f70 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.129926] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7316307-b234-4dc7-a4f7-cac33b13dc56 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.144629] env[62684]: DEBUG 
nova.compute.provider_tree [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2045.190393] env[62684]: DEBUG oslo_vmware.api [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053138, 'name': Rename_Task, 'duration_secs': 0.14107} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2045.190674] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2045.190921] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-207b2db5-5aab-412f-ba5f-26c59e408433 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.196834] env[62684]: DEBUG oslo_vmware.api [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 2045.196834] env[62684]: value = "task-2053139" [ 2045.196834] env[62684]: _type = "Task" [ 2045.196834] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2045.206333] env[62684]: DEBUG oslo_vmware.api [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053139, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2045.429444] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fafe80ca-2d84-44c1-9b85-ae1abb7c01f8 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "025dfe36-1f14-4bda-84a0-d424364b745b" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2045.648288] env[62684]: DEBUG nova.scheduler.client.report [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2045.707345] env[62684]: DEBUG oslo_vmware.api [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053139, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.155086] env[62684]: DEBUG oslo_concurrency.lockutils [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.812s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2046.159759] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.794s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2046.159759] env[62684]: DEBUG nova.objects.instance [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Lazy-loading 'resources' on Instance uuid 380a804e-e1bf-4efa-8bb8-213733778927 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2046.190259] env[62684]: INFO nova.scheduler.client.report [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Deleted allocations for instance dab11b88-ac23-43f0-9203-024faf41e1f5 [ 2046.211397] env[62684]: DEBUG oslo_vmware.api [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053139, 'name': PowerOnVM_Task, 'duration_secs': 0.634943} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2046.211757] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2046.212114] env[62684]: INFO nova.compute.manager [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Took 9.31 seconds to spawn the instance on the hypervisor. [ 2046.212114] env[62684]: DEBUG nova.compute.manager [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2046.214246] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82df907e-b7a4-4b55-a7be-329b28a3ad14 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.337879] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "42ae6edd-e1f5-4ef8-a248-8f02e94d798e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2046.338175] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "42ae6edd-e1f5-4ef8-a248-8f02e94d798e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2046.508215] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fafe80ca-2d84-44c1-9b85-ae1abb7c01f8 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "025dfe36-1f14-4bda-84a0-d424364b745b" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2046.508215] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fafe80ca-2d84-44c1-9b85-ae1abb7c01f8 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "025dfe36-1f14-4bda-84a0-d424364b745b" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2046.508215] env[62684]: INFO nova.compute.manager [None req-fafe80ca-2d84-44c1-9b85-ae1abb7c01f8 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Attaching volume 7b18a98f-a692-4a68-9a65-227928ad0562 to /dev/sdb [ 2046.553128] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-01de583d-a750-4af4-a3ed-43f9e9938e3d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.561200] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c865bb20-0e2e-49d3-ae7c-d5b840a6272f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.576054] env[62684]: DEBUG nova.virt.block_device [None req-fafe80ca-2d84-44c1-9b85-ae1abb7c01f8 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Updating existing volume attachment record: 8c874f17-c41c-4416-9245-b4e69c12d063 {{(pid=62684) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2046.700968] env[62684]: DEBUG oslo_concurrency.lockutils [None req-47132580-c9a8-4d2e-a82f-b0f9a761c8c9 tempest-SecurityGroupsTestJSON-1920703525 tempest-SecurityGroupsTestJSON-1920703525-project-member] Lock "dab11b88-ac23-43f0-9203-024faf41e1f5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.405s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2046.735539] env[62684]: INFO nova.compute.manager [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Took 23.71 seconds to build instance. [ 2046.840839] env[62684]: DEBUG nova.compute.manager [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2046.945096] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86473f3b-1b9a-47e9-a5b5-3c21adf5ed0a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.952446] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c2e2797-9eb2-45e2-9b31-b1b01394f7cf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.989383] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f326d0-4fc3-4044-aab7-3de68573630b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.997537] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82e2b048-301f-4a9c-960f-1f6db3cf0998 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.011792] env[62684]: DEBUG nova.compute.provider_tree [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2047.237874] env[62684]: DEBUG oslo_concurrency.lockutils [None req-95569c0c-55cb-4d0e-b16d-a2ac6694885e tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "41da0c18-dd9c-49bb-8b0d-a907575ee22e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.233s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2047.365922] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2047.515809] env[62684]: DEBUG nova.scheduler.client.report [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2048.025549] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.867s {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2048.029928] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.662s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2048.036010] env[62684]: INFO nova.compute.claims [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2048.064592] env[62684]: INFO nova.scheduler.client.report [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Deleted allocations for instance 380a804e-e1bf-4efa-8bb8-213733778927 [ 2048.575784] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e9050893-8e5a-4b64-8c3e-47c76fb34455 tempest-ServersTestMultiNic-723597991 tempest-ServersTestMultiNic-723597991-project-member] Lock "380a804e-e1bf-4efa-8bb8-213733778927" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.911s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2049.000752] env[62684]: DEBUG nova.compute.manager [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2049.001775] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8c9881d-308d-4b80-8b0b-483b7fb456b8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.338097] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d97581-29e1-4e3b-85ee-f86988de321f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.347108] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a3ac0ec-63a3-4fa7-879a-276402c530fc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.384613] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d66978a-4227-4778-9c7c-7a58f4db561d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.394373] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3824dd11-c1d1-455d-b060-7bb6e90ae1fd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.416410] env[62684]: DEBUG nova.compute.provider_tree [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2049.490637] env[62684]: DEBUG oslo_concurrency.lockutils [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Acquiring lock "feca8680-4baa-4b2c-9875-69a88b351dc0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2049.491920] env[62684]: DEBUG oslo_concurrency.lockutils [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Lock "feca8680-4baa-4b2c-9875-69a88b351dc0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2049.491920] env[62684]: DEBUG oslo_concurrency.lockutils [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Acquiring lock "feca8680-4baa-4b2c-9875-69a88b351dc0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2049.491920] env[62684]: DEBUG oslo_concurrency.lockutils [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Lock "feca8680-4baa-4b2c-9875-69a88b351dc0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2049.491920] env[62684]: DEBUG oslo_concurrency.lockutils [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Lock "feca8680-4baa-4b2c-9875-69a88b351dc0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2049.493720] env[62684]: INFO nova.compute.manager [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Terminating instance [ 2049.495627] env[62684]: DEBUG nova.compute.manager [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2049.495752] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2049.496662] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdfff13a-4271-44f0-b970-c5ef13bf80af {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.507756] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2049.507756] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4ec32d6b-ca94-419d-81f6-067172923ba2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.514292] env[62684]: DEBUG oslo_vmware.api [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Waiting for the task: (returnval){ [ 2049.514292] env[62684]: value = "task-2053144" [ 2049.514292] env[62684]: _type = "Task" [ 2049.514292] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2049.523829] env[62684]: INFO nova.compute.manager [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] instance snapshotting [ 2049.525591] env[62684]: DEBUG oslo_vmware.api [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2053144, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2049.529020] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3dee350-25b5-4134-9302-5cff5a5a95bd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.550638] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e093a4-8fa7-4f91-a399-c273be82e153 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.923027] env[62684]: DEBUG nova.scheduler.client.report [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2050.025633] env[62684]: DEBUG oslo_vmware.api [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2053144, 'name': PowerOffVM_Task, 'duration_secs': 0.21396} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2050.025916] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2050.026108] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2050.026377] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5c253a91-1c81-4cc0-96cc-7b2730e4b629 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.049088] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquiring lock "845b2e2a-cee0-4598-afbd-1f07aa52468f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2050.049852] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock 
"845b2e2a-cee0-4598-afbd-1f07aa52468f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2050.063323] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Creating Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2050.063750] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9a576bba-8e81-4f49-abce-ad77f78b163e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.071894] env[62684]: DEBUG oslo_vmware.api [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 2050.071894] env[62684]: value = "task-2053146" [ 2050.071894] env[62684]: _type = "Task" [ 2050.071894] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2050.086051] env[62684]: DEBUG oslo_vmware.api [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053146, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2050.429140] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.401s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2050.429601] env[62684]: DEBUG nova.compute.manager [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2050.552638] env[62684]: DEBUG nova.compute.manager [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2050.584994] env[62684]: DEBUG oslo_vmware.api [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053146, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2050.937257] env[62684]: DEBUG nova.compute.utils [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2050.938430] env[62684]: DEBUG nova.compute.manager [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2050.938534] env[62684]: DEBUG nova.network.neutron [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2050.959230] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquiring lock "9964237b-db9b-49cc-a9bd-d62329ea564e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2050.959745] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Lock "9964237b-db9b-49cc-a9bd-d62329ea564e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2050.997525] env[62684]: DEBUG nova.policy [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6e8b54745b53458eafe4d911d7d6d7d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c54f74085f343d2b790145b0d82a9f8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2051.089465] env[62684]: DEBUG oslo_vmware.api [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053146, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2051.093691] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2051.094490] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2051.097075] env[62684]: INFO nova.compute.claims [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2051.135510] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-fafe80ca-2d84-44c1-9b85-ae1abb7c01f8 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Volume attach. Driver type: vmdk {{(pid=62684) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2051.135851] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-fafe80ca-2d84-44c1-9b85-ae1abb7c01f8 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421315', 'volume_id': '7b18a98f-a692-4a68-9a65-227928ad0562', 'name': 'volume-7b18a98f-a692-4a68-9a65-227928ad0562', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '025dfe36-1f14-4bda-84a0-d424364b745b', 'attached_at': '', 'detached_at': '', 'volume_id': '7b18a98f-a692-4a68-9a65-227928ad0562', 'serial': '7b18a98f-a692-4a68-9a65-227928ad0562'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2051.136692] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c695ed0e-7621-4729-b8e4-e2f8aab13f2c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.158293] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7568a0a1-90df-495d-bd99-a1ca10abd9f0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.185197] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-fafe80ca-2d84-44c1-9b85-ae1abb7c01f8 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] volume-7b18a98f-a692-4a68-9a65-227928ad0562/volume-7b18a98f-a692-4a68-9a65-227928ad0562.vmdk or device None with type thin 
{{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2051.185977] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b2c6c70-05bd-4763-87aa-7deda0e049e1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.205911] env[62684]: DEBUG oslo_vmware.api [None req-fafe80ca-2d84-44c1-9b85-ae1abb7c01f8 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 2051.205911] env[62684]: value = "task-2053147" [ 2051.205911] env[62684]: _type = "Task" [ 2051.205911] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2051.221272] env[62684]: DEBUG oslo_vmware.api [None req-fafe80ca-2d84-44c1-9b85-ae1abb7c01f8 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053147, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2051.410291] env[62684]: DEBUG nova.network.neutron [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Successfully created port: 2fce07b0-060c-45c8-8466-125ebacf17b8 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2051.444572] env[62684]: DEBUG nova.compute.manager [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2051.463528] env[62684]: DEBUG nova.compute.manager [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2051.585672] env[62684]: DEBUG oslo_vmware.api [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053146, 'name': CreateSnapshot_Task, 'duration_secs': 1.043903} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2051.586057] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Created Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2051.586833] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75544f81-fd6a-420f-ad5a-435bab69acfb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.648294] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquiring lock "2f8f7e02-54fb-4275-badb-35c0b840ab33" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2051.648493] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Lock "2f8f7e02-54fb-4275-badb-35c0b840ab33" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2051.720389] env[62684]: DEBUG oslo_vmware.api [None req-fafe80ca-2d84-44c1-9b85-ae1abb7c01f8 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053147, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2051.850914] env[62684]: DEBUG oslo_concurrency.lockutils [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "983218ac-7cf3-48ef-88d8-aa9e9322df4b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2051.852239] env[62684]: DEBUG oslo_concurrency.lockutils [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "983218ac-7cf3-48ef-88d8-aa9e9322df4b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2051.998623] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2052.107709] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Creating linked-clone VM from snapshot {{(pid=62684) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2052.109992] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-cc3a76f0-1eda-4657-bc2c-fcae6eea7d9a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.121701] env[62684]: DEBUG oslo_vmware.api [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 2052.121701] env[62684]: value = "task-2053148" [ 2052.121701] env[62684]: _type = "Task" [ 2052.121701] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2052.133977] env[62684]: DEBUG oslo_vmware.api [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053148, 'name': CloneVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2052.154756] env[62684]: DEBUG nova.compute.manager [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2052.219941] env[62684]: DEBUG oslo_vmware.api [None req-fafe80ca-2d84-44c1-9b85-ae1abb7c01f8 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053147, 'name': ReconfigVM_Task, 'duration_secs': 0.773224} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2052.222725] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-fafe80ca-2d84-44c1-9b85-ae1abb7c01f8 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Reconfigured VM instance instance-00000029 to attach disk [datastore1] volume-7b18a98f-a692-4a68-9a65-227928ad0562/volume-7b18a98f-a692-4a68-9a65-227928ad0562.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2052.234986] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-231c7438-a80d-40de-bab8-5a83ce5ebd02 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.259051] env[62684]: DEBUG oslo_vmware.api [None req-fafe80ca-2d84-44c1-9b85-ae1abb7c01f8 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 2052.259051] env[62684]: value = "task-2053149" [ 2052.259051] env[62684]: _type = "Task" [ 2052.259051] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2052.269473] env[62684]: DEBUG oslo_vmware.api [None req-fafe80ca-2d84-44c1-9b85-ae1abb7c01f8 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053149, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2052.357392] env[62684]: DEBUG nova.compute.manager [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2052.464809] env[62684]: DEBUG nova.compute.manager [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2052.473317] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-500e771a-c147-4e95-a15d-43f0888447c0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.481728] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c01e87-2b7f-428f-a74e-b8f3be11de2c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.515999] env[62684]: DEBUG nova.virt.hardware [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2052.516317] env[62684]: DEBUG nova.virt.hardware [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2052.516481] env[62684]: DEBUG nova.virt.hardware [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2052.516666] env[62684]: DEBUG nova.virt.hardware [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2052.516816] env[62684]: DEBUG nova.virt.hardware [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2052.516974] env[62684]: DEBUG nova.virt.hardware [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2052.517208] env[62684]: DEBUG nova.virt.hardware [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2052.517372] env[62684]: DEBUG nova.virt.hardware [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2052.517672] env[62684]: DEBUG nova.virt.hardware [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2052.517879] env[62684]: DEBUG nova.virt.hardware [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2052.518081] env[62684]: DEBUG nova.virt.hardware [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2052.518891] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-493494a5-0dcc-453b-bb16-64c2a084b43e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.521894] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f33f034e-422f-4681-9e11-f37b1ed2e585 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.530762] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-addc6827-393f-4ffb-8383-a026bc011d06 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.535678] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb03bec5-0527-4215-ad31-55ff496e033f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.549099] env[62684]: DEBUG nova.compute.provider_tree [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2052.559763] env[62684]: DEBUG nova.scheduler.client.report [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2052.632204] env[62684]: DEBUG oslo_vmware.api [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053148, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2052.677566] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2052.769227] env[62684]: DEBUG oslo_vmware.api [None req-fafe80ca-2d84-44c1-9b85-ae1abb7c01f8 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053149, 'name': ReconfigVM_Task, 'duration_secs': 0.296351} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2052.769610] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-fafe80ca-2d84-44c1-9b85-ae1abb7c01f8 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421315', 'volume_id': '7b18a98f-a692-4a68-9a65-227928ad0562', 'name': 'volume-7b18a98f-a692-4a68-9a65-227928ad0562', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '025dfe36-1f14-4bda-84a0-d424364b745b', 'attached_at': '', 'detached_at': '', 'volume_id': '7b18a98f-a692-4a68-9a65-227928ad0562', 'serial': '7b18a98f-a692-4a68-9a65-227928ad0562'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2052.875926] env[62684]: DEBUG oslo_concurrency.lockutils [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2053.064825] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.971s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2053.065529] env[62684]: DEBUG nova.compute.manager [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2053.068579] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.070s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2053.072023] env[62684]: INFO nova.compute.claims [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2053.132808] env[62684]: DEBUG oslo_vmware.api [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053148, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2053.578502] env[62684]: DEBUG nova.compute.utils [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2053.582431] env[62684]: DEBUG nova.compute.manager [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2053.582607] env[62684]: DEBUG nova.network.neutron [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2053.633628] env[62684]: DEBUG oslo_vmware.api [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053148, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2053.658276] env[62684]: DEBUG nova.policy [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fe6c69e19eaa46978e4fe25513f42c0d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '359bcaa2eeb64bcbb6602062777b852e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2053.820558] env[62684]: DEBUG nova.objects.instance [None req-fafe80ca-2d84-44c1-9b85-ae1abb7c01f8 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lazy-loading 'flavor' on Instance uuid 025dfe36-1f14-4bda-84a0-d424364b745b {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2053.936374] env[62684]: DEBUG nova.network.neutron [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Successfully created port: d01b5a43-4c06-4869-b3d4-b610699f6bb1 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2054.014008] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c383dd20-22a1-449c-85e6-ab88bbbab420 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "025dfe36-1f14-4bda-84a0-d424364b745b" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2054.083430] env[62684]: DEBUG nova.compute.manager [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2054.139312] env[62684]: DEBUG oslo_vmware.api [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053148, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2054.326521] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fafe80ca-2d84-44c1-9b85-ae1abb7c01f8 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "025dfe36-1f14-4bda-84a0-d424364b745b" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.819s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2054.327828] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c383dd20-22a1-449c-85e6-ab88bbbab420 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "025dfe36-1f14-4bda-84a0-d424364b745b" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.314s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2054.328048] env[62684]: DEBUG nova.compute.manager [None req-c383dd20-22a1-449c-85e6-ab88bbbab420 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2054.329605] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d5d658-9f5f-4674-9072-5ede1a9387d2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.337403] env[62684]: DEBUG nova.compute.manager [None req-c383dd20-22a1-449c-85e6-ab88bbbab420 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62684) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 2054.341028] env[62684]: DEBUG nova.objects.instance [None req-c383dd20-22a1-449c-85e6-ab88bbbab420 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lazy-loading 'flavor' on Instance uuid 025dfe36-1f14-4bda-84a0-d424364b745b {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2054.382207] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87954729-2d3c-430a-8e0c-ddb765ee305e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.389961] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd385952-5fab-40e6-b373-d4d10085121f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.421396] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-537966a9-0162-43e7-ac55-a6b2a14f9df5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.429119] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5252194a-b88a-48b7-8fcb-f2966069c682 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.442317] env[62684]: DEBUG 
nova.compute.provider_tree [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2054.637042] env[62684]: DEBUG oslo_vmware.api [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053148, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2054.846368] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c383dd20-22a1-449c-85e6-ab88bbbab420 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2054.846673] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de886259-5cc2-43c3-9909-12aceaef16f6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.856587] env[62684]: DEBUG oslo_vmware.api [None req-c383dd20-22a1-449c-85e6-ab88bbbab420 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 2054.856587] env[62684]: value = "task-2053150" [ 2054.856587] env[62684]: _type = "Task" [ 2054.856587] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2054.866242] env[62684]: DEBUG oslo_vmware.api [None req-c383dd20-22a1-449c-85e6-ab88bbbab420 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053150, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2054.945220] env[62684]: DEBUG nova.scheduler.client.report [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2055.093836] env[62684]: DEBUG nova.compute.manager [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2055.122769] env[62684]: DEBUG nova.virt.hardware [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2055.123203] env[62684]: DEBUG nova.virt.hardware [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2055.123447] env[62684]: DEBUG nova.virt.hardware [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2055.123949] env[62684]: DEBUG nova.virt.hardware [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2055.124145] env[62684]: DEBUG nova.virt.hardware [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2055.124290] env[62684]: DEBUG nova.virt.hardware [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2055.124506] env[62684]: DEBUG nova.virt.hardware [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2055.124814] env[62684]: DEBUG nova.virt.hardware [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2055.125053] env[62684]: DEBUG nova.virt.hardware [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2055.125244] env[62684]: DEBUG nova.virt.hardware [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2055.125428] env[62684]: DEBUG nova.virt.hardware [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2055.126657] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76052a82-fe11-4dca-8d91-6fbd05b06bc4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.139131] env[62684]: DEBUG oslo_vmware.api [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053148, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.142553] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b57bb80-b5f6-4b54-83ee-780ec2c04c05 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.365159] env[62684]: DEBUG oslo_vmware.api [None req-c383dd20-22a1-449c-85e6-ab88bbbab420 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053150, 'name': PowerOffVM_Task, 'duration_secs': 0.202661} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2055.365522] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c383dd20-22a1-449c-85e6-ab88bbbab420 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2055.365711] env[62684]: DEBUG nova.compute.manager [None req-c383dd20-22a1-449c-85e6-ab88bbbab420 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2055.366609] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb00703-6e55-4ef2-a380-d92077ec1fa0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.451405] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.383s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2055.452018] env[62684]: DEBUG nova.compute.manager [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2055.454457] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.777s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2055.456402] env[62684]: INFO nova.compute.claims [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2055.640290] env[62684]: DEBUG oslo_vmware.api [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053148, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.884100] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c383dd20-22a1-449c-85e6-ab88bbbab420 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "025dfe36-1f14-4bda-84a0-d424364b745b" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.556s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2055.955083] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Acquiring lock "2aac4230-2070-48be-b91a-5cb4218a0574" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2055.955083] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Lock "2aac4230-2070-48be-b91a-5cb4218a0574" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2055.961035] env[62684]: DEBUG nova.compute.utils [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2055.964374] env[62684]: DEBUG nova.compute.manager [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Not allocating networking since 'none' was specified. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 2056.141600] env[62684]: DEBUG oslo_vmware.api [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053148, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2056.458073] env[62684]: DEBUG nova.compute.manager [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2056.465130] env[62684]: DEBUG nova.compute.manager [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2056.542380] env[62684]: DEBUG nova.objects.instance [None req-c836c08b-93f6-4074-88b1-e203f31e81b2 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lazy-loading 'flavor' on Instance uuid 025dfe36-1f14-4bda-84a0-d424364b745b {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2056.643804] env[62684]: DEBUG oslo_vmware.api [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053148, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2056.737604] env[62684]: DEBUG oslo_concurrency.lockutils [None req-709a49fe-5cc0-431c-824e-fb0efee4ec4a tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2056.737822] env[62684]: DEBUG oslo_concurrency.lockutils [None req-709a49fe-5cc0-431c-824e-fb0efee4ec4a tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2056.816586] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f87a53-afc0-47d1-8a09-82c375b8e918 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.823997] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-047fb1ff-be44-492f-b6e2-19e7d3a7f3f0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.855825] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bdf2935-abff-4cf5-b9ad-2001d0282485 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.863566] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c919e72c-39df-40d6-b84a-a7ca1c6658e8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.876875] env[62684]: DEBUG nova.compute.provider_tree [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2056.980050] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2057.049340] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c836c08b-93f6-4074-88b1-e203f31e81b2 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "refresh_cache-025dfe36-1f14-4bda-84a0-d424364b745b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2057.049514] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c836c08b-93f6-4074-88b1-e203f31e81b2 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquired lock "refresh_cache-025dfe36-1f14-4bda-84a0-d424364b745b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2057.049690] env[62684]: DEBUG nova.network.neutron [None req-c836c08b-93f6-4074-88b1-e203f31e81b2 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2057.049868] env[62684]: DEBUG nova.objects.instance [None req-c836c08b-93f6-4074-88b1-e203f31e81b2 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lazy-loading 'info_cache' on Instance uuid 025dfe36-1f14-4bda-84a0-d424364b745b {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2057.141563] env[62684]: DEBUG oslo_vmware.api [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053148, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.243395] env[62684]: INFO nova.compute.manager [None req-709a49fe-5cc0-431c-824e-fb0efee4ec4a tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Detaching volume 554d5299-0a48-44f8-bb8e-9328f519c7ee [ 2057.253476] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2057.254036] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2057.254385] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Deleting the datastore file [datastore2] feca8680-4baa-4b2c-9875-69a88b351dc0 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2057.255569] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0e210d0a-137b-440e-92bf-6f32c9dd00b4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.262985] env[62684]: DEBUG oslo_vmware.api [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Waiting for the task: (returnval){ [ 2057.262985] env[62684]: value = "task-2053151" [ 2057.262985] env[62684]: _type = "Task" [ 2057.262985] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.272690] env[62684]: DEBUG oslo_vmware.api [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2053151, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.287494] env[62684]: INFO nova.virt.block_device [None req-709a49fe-5cc0-431c-824e-fb0efee4ec4a tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Attempting to driver detach volume 554d5299-0a48-44f8-bb8e-9328f519c7ee from mountpoint /dev/sdb [ 2057.287745] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-709a49fe-5cc0-431c-824e-fb0efee4ec4a tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Volume detach. 
Driver type: vmdk {{(pid=62684) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2057.287948] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-709a49fe-5cc0-431c-824e-fb0efee4ec4a tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421287', 'volume_id': '554d5299-0a48-44f8-bb8e-9328f519c7ee', 'name': 'volume-554d5299-0a48-44f8-bb8e-9328f519c7ee', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'b4cd871a-30ea-4b7a-98ad-00b8676dc2cd', 'attached_at': '', 'detached_at': '', 'volume_id': '554d5299-0a48-44f8-bb8e-9328f519c7ee', 'serial': '554d5299-0a48-44f8-bb8e-9328f519c7ee'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2057.288817] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07f3cd0e-e2aa-4126-ab14-a293255bbe09 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.315034] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40fb9e6e-0afb-4a5f-a9be-1adc224d6717 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.322789] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5731a64-853d-4918-81af-309c829f83bd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.346660] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5cf022a-2797-409e-84cc-aad8cd97eec5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.363621] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-709a49fe-5cc0-431c-824e-fb0efee4ec4a tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] The volume has not been displaced from its original location: [datastore2] volume-554d5299-0a48-44f8-bb8e-9328f519c7ee/volume-554d5299-0a48-44f8-bb8e-9328f519c7ee.vmdk. No consolidation needed. 
{{(pid=62684) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2057.369385] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-709a49fe-5cc0-431c-824e-fb0efee4ec4a tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Reconfiguring VM instance instance-00000025 to detach disk 2001 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2057.369716] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bcdab9cb-161b-4993-94f1-7d9fd110c274 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.382923] env[62684]: DEBUG nova.scheduler.client.report [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2057.391914] env[62684]: DEBUG oslo_vmware.api [None req-709a49fe-5cc0-431c-824e-fb0efee4ec4a tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2057.391914] env[62684]: value = "task-2053152" [ 2057.391914] env[62684]: _type = "Task" [ 2057.391914] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.403063] env[62684]: DEBUG oslo_vmware.api [None req-709a49fe-5cc0-431c-824e-fb0efee4ec4a tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053152, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.479498] env[62684]: DEBUG nova.compute.manager [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2057.507827] env[62684]: DEBUG nova.virt.hardware [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2057.508116] env[62684]: DEBUG nova.virt.hardware [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2057.508288] env[62684]: DEBUG nova.virt.hardware [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2057.508480] env[62684]: DEBUG nova.virt.hardware [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2057.508632] env[62684]: DEBUG nova.virt.hardware [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2057.508785] env[62684]: DEBUG nova.virt.hardware [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2057.508998] env[62684]: DEBUG nova.virt.hardware [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2057.509180] env[62684]: DEBUG nova.virt.hardware [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2057.509384] env[62684]: DEBUG nova.virt.hardware [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e 
tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2057.509560] env[62684]: DEBUG nova.virt.hardware [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2057.509737] env[62684]: DEBUG nova.virt.hardware [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2057.510610] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d14841-27e6-4927-90cb-a63def6b4ebc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.518350] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-402cb0a2-c8e3-4e3d-a678-89551ec13f8e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.533423] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Instance VIF info [] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2057.538863] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Creating folder: Project (2b508538c55c479ea4357aa781d83f48). Parent ref: group-v421118. 
{{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2057.539165] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f3b9504c-cf45-47fa-921f-919b74747f78 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.553176] env[62684]: DEBUG nova.objects.base [None req-c836c08b-93f6-4074-88b1-e203f31e81b2 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Object Instance<025dfe36-1f14-4bda-84a0-d424364b745b> lazy-loaded attributes: flavor,info_cache {{(pid=62684) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2057.561756] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Acquiring lock "31419285-9fdf-4d37-94d7-d1b08c6b6b05" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2057.562125] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Lock "31419285-9fdf-4d37-94d7-d1b08c6b6b05" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2057.562355] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Acquiring lock "31419285-9fdf-4d37-94d7-d1b08c6b6b05-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2057.562544] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Lock "31419285-9fdf-4d37-94d7-d1b08c6b6b05-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2057.562721] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Lock "31419285-9fdf-4d37-94d7-d1b08c6b6b05-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2057.564642] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Created folder: Project (2b508538c55c479ea4357aa781d83f48) in parent group-v421118. [ 2057.564900] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Creating folder: Instances. 
Parent ref: group-v421318. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2057.565891] env[62684]: INFO nova.compute.manager [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Terminating instance [ 2057.567220] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-27d3cc09-94c6-4496-8d2d-a394d046fdba {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.569631] env[62684]: DEBUG nova.compute.manager [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2057.569830] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2057.570634] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad98f4f-cfd5-43b8-aa10-f51e4f4054da {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.579753] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2057.580032] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52909b0a-b998-4e2d-b300-56ee4d6bfe7a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.582773] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Created folder: Instances in parent group-v421318. [ 2057.583135] env[62684]: DEBUG oslo.service.loopingcall [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2057.583593] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2057.583845] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ac5e8ac5-0838-4901-9139-897cca9a8e97 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.598397] env[62684]: DEBUG oslo_vmware.api [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Waiting for the task: (returnval){ [ 2057.598397] env[62684]: value = "task-2053155" [ 2057.598397] env[62684]: _type = "Task" [ 2057.598397] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.603276] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2057.603276] env[62684]: value = "task-2053156" [ 2057.603276] env[62684]: _type = "Task" [ 2057.603276] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.610547] env[62684]: DEBUG oslo_vmware.api [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Task: {'id': task-2053155, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.616617] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053156, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.642788] env[62684]: DEBUG oslo_vmware.api [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053148, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.774672] env[62684]: DEBUG oslo_vmware.api [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Task: {'id': task-2053151, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.190492} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2057.775085] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2057.775367] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2057.775660] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2057.775861] env[62684]: INFO nova.compute.manager [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Took 8.28 seconds to destroy the instance on the hypervisor. [ 2057.776440] env[62684]: DEBUG oslo.service.loopingcall [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2057.776523] env[62684]: DEBUG nova.compute.manager [-] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2057.776649] env[62684]: DEBUG nova.network.neutron [-] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2057.888136] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.433s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2057.888709] env[62684]: DEBUG nova.compute.manager [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2057.891262] env[62684]: DEBUG oslo_concurrency.lockutils [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.015s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2057.892648] env[62684]: INFO nova.compute.claims [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2057.904286] env[62684]: DEBUG oslo_vmware.api [None req-709a49fe-5cc0-431c-824e-fb0efee4ec4a tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053152, 'name': ReconfigVM_Task, 'duration_secs': 0.225877} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2057.904537] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-709a49fe-5cc0-431c-824e-fb0efee4ec4a tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Reconfigured VM instance instance-00000025 to detach disk 2001 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2057.910326] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f1869b6-7885-4c85-9769-2975b7411624 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.929595] env[62684]: DEBUG oslo_vmware.api [None req-709a49fe-5cc0-431c-824e-fb0efee4ec4a tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2057.929595] env[62684]: value = "task-2053157" [ 2057.929595] env[62684]: _type = "Task" [ 2057.929595] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.938973] env[62684]: DEBUG oslo_vmware.api [None req-709a49fe-5cc0-431c-824e-fb0efee4ec4a tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053157, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.110646] env[62684]: DEBUG oslo_vmware.api [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Task: {'id': task-2053155, 'name': PowerOffVM_Task, 'duration_secs': 0.250856} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2058.111835] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2058.111835] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2058.112040] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e7d7ee0c-3892-4321-af10-cedcf16f2bb3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.116800] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053156, 'name': CreateVM_Task, 'duration_secs': 0.259332} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2058.116959] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2058.117420] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2058.117634] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2058.117999] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2058.118338] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed24a3ea-e8a8-4687-8697-13f6c5dab132 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.122499] env[62684]: DEBUG oslo_vmware.api [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2058.122499] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5294a8ef-f2af-943d-d96f-ac8f3250fb85" [ 2058.122499] env[62684]: _type = "Task" [ 2058.122499] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2058.130412] env[62684]: DEBUG oslo_vmware.api [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5294a8ef-f2af-943d-d96f-ac8f3250fb85, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.144716] env[62684]: DEBUG oslo_vmware.api [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053148, 'name': CloneVM_Task, 'duration_secs': 5.961162} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2058.144716] env[62684]: INFO nova.virt.vmwareapi.vmops [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Created linked-clone VM from snapshot [ 2058.144716] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44c4742f-9fa6-42f3-ba5d-b8f8c6f7a929 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.150789] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Uploading image c913a438-5830-4a5b-bfe5-58d8f4249780 {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2058.165782] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Destroying the VM {{(pid=62684) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2058.166107] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d70b15e4-f9c8-4280-9e60-9674882c0e2a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.174127] env[62684]: DEBUG oslo_vmware.api [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 2058.174127] env[62684]: value = "task-2053159" [ 2058.174127] env[62684]: _type = "Task" [ 2058.174127] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2058.185751] env[62684]: DEBUG oslo_vmware.api [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053159, 'name': Destroy_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.328330] env[62684]: DEBUG nova.network.neutron [None req-c836c08b-93f6-4074-88b1-e203f31e81b2 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Updating instance_info_cache with network_info: [{"id": "1d1c0f31-e026-45f0-b3c8-5ba02555e863", "address": "fa:16:3e:42:6e:d1", "network": {"id": "bf53c8de-5f43-4a15-9911-25340615a63b", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1946277195-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "540d70f4b6274c38a5e79c00e389d8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6db039c-542c-4544-a57d-ddcc6c1e8e45", "external-id": "nsx-vlan-transportzone-810", "segmentation_id": 810, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d1c0f31-e0", "ovs_interfaceid": "1d1c0f31-e026-45f0-b3c8-5ba02555e863", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2058.396655] env[62684]: DEBUG nova.compute.utils [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2058.400330] env[62684]: DEBUG nova.compute.manager [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Not allocating networking since 'none' was specified. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 2058.440827] env[62684]: DEBUG oslo_vmware.api [None req-709a49fe-5cc0-431c-824e-fb0efee4ec4a tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053157, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.487669] env[62684]: DEBUG nova.compute.manager [req-5dda0326-86d5-4976-a4e6-9be273f22c5c req-ca49a092-36b3-4e0d-a697-b65e5c718d87 service nova] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Received event network-vif-deleted-cd978df3-9bd9-4010-88e9-d5a4127cf2a9 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2058.487873] env[62684]: INFO nova.compute.manager [req-5dda0326-86d5-4976-a4e6-9be273f22c5c req-ca49a092-36b3-4e0d-a697-b65e5c718d87 service nova] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Neutron deleted interface cd978df3-9bd9-4010-88e9-d5a4127cf2a9; detaching it from the instance and deleting it from the info cache [ 2058.488061] env[62684]: DEBUG nova.network.neutron [req-5dda0326-86d5-4976-a4e6-9be273f22c5c req-ca49a092-36b3-4e0d-a697-b65e5c718d87 service nova] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2058.491487] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2058.491698] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2058.491878] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Deleting the datastore file [datastore1] 31419285-9fdf-4d37-94d7-d1b08c6b6b05 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2058.492360] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be0a6655-299a-4069-a120-ed76ca0086e9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.502405] env[62684]: DEBUG oslo_vmware.api [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Waiting for the task: (returnval){ [ 2058.502405] env[62684]: value = "task-2053160" [ 2058.502405] env[62684]: _type = "Task" [ 2058.502405] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2058.513071] env[62684]: DEBUG oslo_vmware.api [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Task: {'id': task-2053160, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.545561] env[62684]: DEBUG nova.compute.manager [req-a2bbeb6c-91c9-40fc-9ea2-2a300d8090f6 req-931a277f-daef-4d89-8e1a-fe9a4768c69f service nova] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Received event network-vif-plugged-2fce07b0-060c-45c8-8466-125ebacf17b8 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2058.545832] env[62684]: DEBUG oslo_concurrency.lockutils [req-a2bbeb6c-91c9-40fc-9ea2-2a300d8090f6 req-931a277f-daef-4d89-8e1a-fe9a4768c69f service nova] Acquiring lock "42ae6edd-e1f5-4ef8-a248-8f02e94d798e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2058.546083] env[62684]: DEBUG oslo_concurrency.lockutils [req-a2bbeb6c-91c9-40fc-9ea2-2a300d8090f6 req-931a277f-daef-4d89-8e1a-fe9a4768c69f service nova] Lock "42ae6edd-e1f5-4ef8-a248-8f02e94d798e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2058.546282] env[62684]: DEBUG oslo_concurrency.lockutils [req-a2bbeb6c-91c9-40fc-9ea2-2a300d8090f6 req-931a277f-daef-4d89-8e1a-fe9a4768c69f service nova] Lock "42ae6edd-e1f5-4ef8-a248-8f02e94d798e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2058.546476] env[62684]: DEBUG nova.compute.manager [req-a2bbeb6c-91c9-40fc-9ea2-2a300d8090f6 req-931a277f-daef-4d89-8e1a-fe9a4768c69f service nova] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] No waiting events found dispatching network-vif-plugged-2fce07b0-060c-45c8-8466-125ebacf17b8 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2058.546663] env[62684]: WARNING nova.compute.manager [req-a2bbeb6c-91c9-40fc-9ea2-2a300d8090f6 req-931a277f-daef-4d89-8e1a-fe9a4768c69f service nova] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Received unexpected event network-vif-plugged-2fce07b0-060c-45c8-8466-125ebacf17b8 for instance with vm_state building and task_state spawning. [ 2058.599937] env[62684]: DEBUG nova.network.neutron [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Successfully updated port: 2fce07b0-060c-45c8-8466-125ebacf17b8 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2058.634374] env[62684]: DEBUG oslo_vmware.api [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5294a8ef-f2af-943d-d96f-ac8f3250fb85, 'name': SearchDatastore_Task, 'duration_secs': 0.008952} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2058.634729] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2058.634986] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2058.635507] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2058.635669] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2058.635856] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2058.636357] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b0c93787-8fe3-4a92-8eb4-395d64a3a243 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.645639] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2058.646016] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2058.646565] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db8bd351-a640-4885-84b7-aabba1fe5750 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.652318] env[62684]: DEBUG oslo_vmware.api [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2058.652318] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e05d51-ea55-8069-838d-65c835a13ba8" [ 2058.652318] env[62684]: _type = "Task" [ 2058.652318] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2058.659937] env[62684]: DEBUG oslo_vmware.api [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e05d51-ea55-8069-838d-65c835a13ba8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.684990] env[62684]: DEBUG oslo_vmware.api [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053159, 'name': Destroy_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.697886] env[62684]: DEBUG nova.network.neutron [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Successfully updated port: d01b5a43-4c06-4869-b3d4-b610699f6bb1 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2058.833421] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c836c08b-93f6-4074-88b1-e203f31e81b2 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Releasing lock "refresh_cache-025dfe36-1f14-4bda-84a0-d424364b745b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2058.900015] env[62684]: DEBUG nova.compute.manager [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2058.917075] env[62684]: DEBUG nova.network.neutron [-] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2058.942857] env[62684]: DEBUG oslo_vmware.api [None req-709a49fe-5cc0-431c-824e-fb0efee4ec4a tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053157, 'name': ReconfigVM_Task, 'duration_secs': 0.776535} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2058.943536] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-709a49fe-5cc0-431c-824e-fb0efee4ec4a tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421287', 'volume_id': '554d5299-0a48-44f8-bb8e-9328f519c7ee', 'name': 'volume-554d5299-0a48-44f8-bb8e-9328f519c7ee', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'b4cd871a-30ea-4b7a-98ad-00b8676dc2cd', 'attached_at': '', 'detached_at': '', 'volume_id': '554d5299-0a48-44f8-bb8e-9328f519c7ee', 'serial': '554d5299-0a48-44f8-bb8e-9328f519c7ee'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2058.993720] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ce6d35b1-2f9c-4840-b65e-9e00860659dc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.002929] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54d0dfa7-7bc9-4609-9926-22b187d2e1ca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.025275] env[62684]: DEBUG oslo_vmware.api [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Task: {'id': task-2053160, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.195797} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.025619] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2059.025779] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2059.025857] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2059.026063] env[62684]: INFO nova.compute.manager [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Took 1.46 seconds to destroy the instance on the hypervisor. 
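[editor's note] The repeated "Waiting for the task: (returnval){ ... } to complete" / "_poll_task ... progress is N%" entries above all come from the same oslo.vmware pattern: a vSphere `*_Task` method (PowerOffVM_Task, CreateVM_Task, DeleteDatastoreFile_Task, ...) is invoked through the API session, and the session then polls the returned Task object until it finishes. The following is a minimal, hypothetical sketch of that pattern, not the actual Nova source; `session` is assumed to be an existing oslo_vmware.api.VMwareAPISession and `vm_ref` a VirtualMachine managed-object reference obtained elsewhere.

    # Sketch only: illustrates the invoke-then-wait pattern behind the
    # wait_for_task/_poll_task log lines above. Assumes an already-created
    # oslo_vmware.api.VMwareAPISession (`session`) and a VM managed-object
    # reference (`vm_ref`) looked up beforehand.
    def power_off_and_wait(session, vm_ref):
        # PowerOffVM_Task returns a Task moref rather than blocking.
        task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task() polls the task (the "progress is N%" entries) and
        # returns the completed task info, or raises if the task errored.
        return session.wait_for_task(task_ref)

The same shape applies to the folder creation, VM creation, datastore file deletion, and virtual-disk copy tasks interleaved in this log; only the invoked vSphere method and its arguments differ.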
[ 2059.026279] env[62684]: DEBUG oslo.service.loopingcall [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2059.028589] env[62684]: DEBUG nova.compute.manager [-] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2059.028700] env[62684]: DEBUG nova.network.neutron [-] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2059.040824] env[62684]: DEBUG nova.compute.manager [req-5dda0326-86d5-4976-a4e6-9be273f22c5c req-ca49a092-36b3-4e0d-a697-b65e5c718d87 service nova] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Detach interface failed, port_id=cd978df3-9bd9-4010-88e9-d5a4127cf2a9, reason: Instance feca8680-4baa-4b2c-9875-69a88b351dc0 could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2059.104643] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "refresh_cache-42ae6edd-e1f5-4ef8-a248-8f02e94d798e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2059.104813] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquired lock "refresh_cache-42ae6edd-e1f5-4ef8-a248-8f02e94d798e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2059.104863] env[62684]: DEBUG nova.network.neutron [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2059.162616] env[62684]: DEBUG oslo_vmware.api [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e05d51-ea55-8069-838d-65c835a13ba8, 'name': SearchDatastore_Task, 'duration_secs': 0.011225} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.166106] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e14c037-2fe2-4418-8646-8546018bc806 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.172044] env[62684]: DEBUG oslo_vmware.api [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2059.172044] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5252319d-1386-db38-b7eb-e423382f2322" [ 2059.172044] env[62684]: _type = "Task" [ 2059.172044] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.184306] env[62684]: DEBUG oslo_vmware.api [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5252319d-1386-db38-b7eb-e423382f2322, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.189023] env[62684]: DEBUG oslo_vmware.api [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053159, 'name': Destroy_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.190552] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc5f3468-37ae-41b4-9d48-8973fe398a04 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.196868] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-190ef934-f75e-4f48-b3d6-eac854bc6497 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.201646] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquiring lock "refresh_cache-845b2e2a-cee0-4598-afbd-1f07aa52468f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2059.201782] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquired lock "refresh_cache-845b2e2a-cee0-4598-afbd-1f07aa52468f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2059.201931] env[62684]: DEBUG nova.network.neutron [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2059.230489] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-cdbe9c27-7908-4fc2-b680-3d5b534a38ec {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.238921] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59b07f18-1255-465a-a9e1-3b2642f208c9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.252631] env[62684]: DEBUG nova.compute.provider_tree [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2059.262184] env[62684]: DEBUG nova.network.neutron [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2059.337740] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c836c08b-93f6-4074-88b1-e203f31e81b2 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2059.338124] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-63d533b3-d7b3-43b3-a9e5-e5c9d8028221 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.348545] env[62684]: DEBUG oslo_vmware.api [None req-c836c08b-93f6-4074-88b1-e203f31e81b2 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 2059.348545] env[62684]: value = "task-2053161" [ 2059.348545] env[62684]: _type = "Task" [ 2059.348545] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.356309] env[62684]: DEBUG oslo_vmware.api [None req-c836c08b-93f6-4074-88b1-e203f31e81b2 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053161, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.395244] env[62684]: DEBUG nova.network.neutron [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Updating instance_info_cache with network_info: [{"id": "d01b5a43-4c06-4869-b3d4-b610699f6bb1", "address": "fa:16:3e:11:19:a8", "network": {"id": "eb5671f2-a648-452e-a4c0-e81eb932b49c", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-524881696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "359bcaa2eeb64bcbb6602062777b852e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "001929c7-0dc4-4b73-a9f1-d672f8377985", "external-id": "nsx-vlan-transportzone-230", "segmentation_id": 230, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd01b5a43-4c", "ovs_interfaceid": "d01b5a43-4c06-4869-b3d4-b610699f6bb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2059.419586] env[62684]: INFO nova.compute.manager [-] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Took 1.64 seconds to deallocate network for instance. [ 2059.511123] env[62684]: DEBUG nova.objects.instance [None req-709a49fe-5cc0-431c-824e-fb0efee4ec4a tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lazy-loading 'flavor' on Instance uuid b4cd871a-30ea-4b7a-98ad-00b8676dc2cd {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2059.637790] env[62684]: DEBUG nova.network.neutron [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2059.687131] env[62684]: DEBUG oslo_vmware.api [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5252319d-1386-db38-b7eb-e423382f2322, 'name': SearchDatastore_Task, 'duration_secs': 0.011281} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.687617] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2059.687947] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 9964237b-db9b-49cc-a9bd-d62329ea564e/9964237b-db9b-49cc-a9bd-d62329ea564e.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2059.688253] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9c08830b-c134-44fc-817b-2124688d7888 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.693124] env[62684]: DEBUG oslo_vmware.api [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053159, 'name': Destroy_Task, 'duration_secs': 1.436103} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.693726] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Destroyed the VM [ 2059.693972] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Deleting Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2059.694257] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-27c015b0-ebfa-44be-9455-467b952ea242 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.698083] env[62684]: DEBUG oslo_vmware.api [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2059.698083] env[62684]: value = "task-2053162" [ 2059.698083] env[62684]: _type = "Task" [ 2059.698083] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.704488] env[62684]: DEBUG oslo_vmware.api [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 2059.704488] env[62684]: value = "task-2053163" [ 2059.704488] env[62684]: _type = "Task" [ 2059.704488] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.713642] env[62684]: DEBUG oslo_vmware.api [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053162, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.723709] env[62684]: DEBUG oslo_vmware.api [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053163, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.757218] env[62684]: DEBUG nova.scheduler.client.report [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2059.792166] env[62684]: DEBUG nova.network.neutron [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Updating instance_info_cache with network_info: [{"id": "2fce07b0-060c-45c8-8466-125ebacf17b8", "address": "fa:16:3e:ed:e7:d2", "network": {"id": "aa52badb-0b73-48bc-afaa-5e06a97d5c7d", "bridge": "br-int", "label": "tempest-ServersTestJSON-556342067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c54f74085f343d2b790145b0d82a9f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2fce07b0-06", "ovs_interfaceid": "2fce07b0-060c-45c8-8466-125ebacf17b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2059.862804] env[62684]: DEBUG oslo_vmware.api [None req-c836c08b-93f6-4074-88b1-e203f31e81b2 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053161, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.898317] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Releasing lock "refresh_cache-845b2e2a-cee0-4598-afbd-1f07aa52468f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2059.898783] env[62684]: DEBUG nova.compute.manager [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Instance network_info: |[{"id": "d01b5a43-4c06-4869-b3d4-b610699f6bb1", "address": "fa:16:3e:11:19:a8", "network": {"id": "eb5671f2-a648-452e-a4c0-e81eb932b49c", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-524881696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "359bcaa2eeb64bcbb6602062777b852e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "001929c7-0dc4-4b73-a9f1-d672f8377985", "external-id": "nsx-vlan-transportzone-230", "segmentation_id": 230, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd01b5a43-4c", "ovs_interfaceid": "d01b5a43-4c06-4869-b3d4-b610699f6bb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2059.899421] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:19:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '001929c7-0dc4-4b73-a9f1-d672f8377985', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd01b5a43-4c06-4869-b3d4-b610699f6bb1', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2059.912691] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Creating folder: Project (359bcaa2eeb64bcbb6602062777b852e). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2059.914392] env[62684]: DEBUG nova.compute.manager [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2059.918336] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1a5414c6-2084-486f-a480-95c5df1ffed9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.927545] env[62684]: DEBUG oslo_concurrency.lockutils [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2059.933185] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Created folder: Project (359bcaa2eeb64bcbb6602062777b852e) in parent group-v421118. [ 2059.933506] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Creating folder: Instances. Parent ref: group-v421321. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2059.934018] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a52b885e-55ea-489e-8e20-cd6e14ca7834 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.947846] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Created folder: Instances in parent group-v421321. [ 2059.948314] env[62684]: DEBUG oslo.service.loopingcall [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2059.949568] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2059.949568] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-627c88ca-b428-44f9-a8c7-3db3fe461fbd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.975819] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2059.975819] env[62684]: value = "task-2053166" [ 2059.975819] env[62684]: _type = "Task" [ 2059.975819] env[62684]: } to complete. 
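[editor's note] The two Folder.CreateFolder calls above create a per-project folder under the root folder (group-v421118) and an "Instances" folder beneath it; creating a folder that already exists is effectively treated as a no-op. A generic sketch of that idempotent ensure-folder chain (the Folder class is a stand-in, not the vSphere SDK):

# Generic sketch of the idempotent folder creation seen in the log:
# "Project (<tenant id>)" under the root folder, then "Instances" under it.
# Folder here is a stand-in object, not a vSphere managed-object reference.
class Folder:
    def __init__(self, name):
        self.name = name
        self.children = {}

def ensure_folder(parent, name):
    # Only create the child if it does not already exist; a duplicate-name
    # result is effectively ignored in the real driver.
    if name not in parent.children:
        parent.children[name] = Folder(name)
    return parent.children[name]

root = Folder('group-v421118')
project = ensure_folder(root, 'Project (359bcaa2eeb64bcbb6602062777b852e)')
instances = ensure_folder(project, 'Instances')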
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.982051] env[62684]: DEBUG nova.virt.hardware [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2059.982437] env[62684]: DEBUG nova.virt.hardware [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2059.982522] env[62684]: DEBUG nova.virt.hardware [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2059.982716] env[62684]: DEBUG nova.virt.hardware [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2059.982866] env[62684]: DEBUG nova.virt.hardware [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2059.983125] env[62684]: DEBUG nova.virt.hardware [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2059.983252] env[62684]: DEBUG nova.virt.hardware [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2059.983420] env[62684]: DEBUG nova.virt.hardware [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2059.983595] env[62684]: DEBUG nova.virt.hardware [None 
req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2059.983765] env[62684]: DEBUG nova.virt.hardware [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2059.983947] env[62684]: DEBUG nova.virt.hardware [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2059.985245] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4505af3-9049-4edf-8792-1d843390d2c4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.997933] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0690aefd-6eda-4079-9bb3-ea1586f4d30f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.002428] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053166, 'name': CreateVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.015543] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Instance VIF info [] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2060.021592] env[62684]: DEBUG oslo.service.loopingcall [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2060.023388] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2060.023637] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-489e0b5c-d64a-4676-9b76-272f0de8f839 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.042300] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2060.042300] env[62684]: value = "task-2053167" [ 2060.042300] env[62684]: _type = "Task" [ 2060.042300] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2060.051024] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053167, 'name': CreateVM_Task} progress is 0%. 
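[editor's note] The nova.virt.hardware lines above derive the possible (sockets, cores, threads) layouts for the 1-vCPU m1.nano flavor under effectively unlimited caps (65536 each), ending with the single candidate 1:1:1. The enumeration below is a simplified sketch of that idea only; the real code also weighs flavor/image preferences and ordering rules:

# Simplified sketch of "possible CPU topologies": factor the vCPU count into
# sockets * cores * threads within the given limits. For vcpus=1 and limits
# of 65536 the only candidate is (1, 1, 1), matching the log above.
def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // sockets // cores
            if threads <= max_threads:
                yield (sockets, cores, threads)

print(list(possible_topologies(1, 65536, 65536, 65536)))   # [(1, 1, 1)]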
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.211798] env[62684]: DEBUG oslo_vmware.api [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053162, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.466304} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2060.212898] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 9964237b-db9b-49cc-a9bd-d62329ea564e/9964237b-db9b-49cc-a9bd-d62329ea564e.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2060.212898] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2060.212898] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7c9fdba3-1f81-4316-8076-aa4c098061b3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.218408] env[62684]: DEBUG nova.network.neutron [-] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2060.219116] env[62684]: DEBUG oslo_vmware.api [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053163, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.224504] env[62684]: DEBUG oslo_vmware.api [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2060.224504] env[62684]: value = "task-2053168" [ 2060.224504] env[62684]: _type = "Task" [ 2060.224504] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2060.232584] env[62684]: DEBUG oslo_vmware.api [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053168, 'name': ExtendVirtualDisk_Task} progress is 0%. 
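[editor's note] The CopyVirtualDisk/ExtendVirtualDisk pair above copies the cached image to the instance directory and then grows the root disk to the flavor size; the logged value 1048576 is root_gb=1 expressed in KiB (1 GiB = 1024 * 1024 KiB). A quick arithmetic sketch of that conversion and the extend-only-if-larger guard (names and the guard itself are illustrative):

# The "Extending root virtual disk to 1048576" value is the flavor root_gb
# converted to KiB. The guard below is an illustrative simplification, not
# the driver's exact check.
def root_disk_size_kb(root_gb):
    return root_gb * 1024 * 1024

image_size_bytes = 21318656            # 'size' from the image_meta logged above
requested_kb = root_disk_size_kb(1)    # 1048576
if requested_kb * 1024 > image_size_bytes:
    print('extend root virtual disk to %d KiB' % requested_kb)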
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.262584] env[62684]: DEBUG oslo_concurrency.lockutils [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.371s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2060.263307] env[62684]: DEBUG nova.compute.manager [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2060.266088] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.289s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2060.267745] env[62684]: INFO nova.compute.claims [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2060.297013] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Releasing lock "refresh_cache-42ae6edd-e1f5-4ef8-a248-8f02e94d798e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2060.297513] env[62684]: DEBUG nova.compute.manager [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Instance network_info: |[{"id": "2fce07b0-060c-45c8-8466-125ebacf17b8", "address": "fa:16:3e:ed:e7:d2", "network": {"id": "aa52badb-0b73-48bc-afaa-5e06a97d5c7d", "bridge": "br-int", "label": "tempest-ServersTestJSON-556342067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c54f74085f343d2b790145b0d82a9f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2fce07b0-06", "ovs_interfaceid": "2fce07b0-060c-45c8-8466-125ebacf17b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 
2060.298255] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:e7:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1fb81f98-6f5a-47ab-a512-27277591d064', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2fce07b0-060c-45c8-8466-125ebacf17b8', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2060.307821] env[62684]: DEBUG oslo.service.loopingcall [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2060.308899] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2060.309182] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7fc3f627-e8dc-45a1-ba56-09790f77f142 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.338223] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2060.338223] env[62684]: value = "task-2053169" [ 2060.338223] env[62684]: _type = "Task" [ 2060.338223] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2060.347105] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053169, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.363394] env[62684]: DEBUG oslo_vmware.api [None req-c836c08b-93f6-4074-88b1-e203f31e81b2 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053161, 'name': PowerOnVM_Task, 'duration_secs': 0.613878} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2060.363394] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c836c08b-93f6-4074-88b1-e203f31e81b2 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2060.363394] env[62684]: DEBUG nova.compute.manager [None req-c836c08b-93f6-4074-88b1-e203f31e81b2 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2060.363892] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-648773e5-67f1-42e2-b90f-a4ebe762711b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.486316] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053166, 'name': CreateVM_Task, 'duration_secs': 0.455962} completed successfully. 
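[editor's note] Throughout this section the oslo.vmware tasks (CopyVirtualDisk, CreateVM, PowerOnVM, RemoveSnapshot, ExtendVirtualDisk, SearchDatastore) are all driven by the same wait_for_task/_poll_task pattern: poll progress until the task reports success or error, then record the duration, as with the PowerOnVM_Task that just completed above with duration_secs 0.613878. A stripped-down stand-in for that loop (the Task dict and poll() callable are hypothetical, not oslo.vmware types):

import time

# Stripped-down stand-in for the wait_for_task / _poll_task pattern seen in
# the log: poll a task's progress until it finishes, then report the duration.
def wait_for_task(poll, interval=0.5):
    start = time.monotonic()
    while True:
        task = poll()                          # e.g. {'state': 'running', 'progress': 66}
        if task['state'] == 'success':
            return time.monotonic() - start    # analogous to 'duration_secs'
        if task['state'] == 'error':
            raise RuntimeError(task.get('error', 'task failed'))
        print('progress is %d%%' % task.get('progress', 0))
        time.sleep(interval)

# Tiny usage example with a canned sequence of poll results.
states = iter([{'state': 'running', 'progress': 66}, {'state': 'success'}])
print('duration_secs %.3f' % wait_for_task(lambda: next(states), interval=0))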
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2060.486488] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2060.487228] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2060.487414] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2060.487744] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2060.488036] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d9d2677-141b-425e-9ed8-b2588a81df41 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.492328] env[62684]: DEBUG oslo_vmware.api [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2060.492328] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c7642c-e09c-d754-f1a5-20ed4e5fb096" [ 2060.492328] env[62684]: _type = "Task" [ 2060.492328] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2060.500071] env[62684]: DEBUG oslo_vmware.api [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c7642c-e09c-d754-f1a5-20ed4e5fb096, 'name': SearchDatastore_Task} progress is 0%. 
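[editor's note] The lockutils messages in this section ("acquired ... waited 3.289s", "released ... held 2.371s", and the image-cache lock acquired just above) record how long each caller waited for and then held a named lock. A self-contained sketch of that wait/held bookkeeping using a plain threading.Lock (this is not the oslo.concurrency implementation):

import contextlib
import threading
import time

_locks = {}

# Self-contained sketch of the "waited ... / held ..." bookkeeping that the
# lockutils log lines show; plain threading, not oslo.concurrency itself.
@contextlib.contextmanager
def timed_lock(name):
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    with lock:
        print('Lock "%s" acquired :: waited %.3fs' % (name, time.monotonic() - t0))
        t1 = time.monotonic()
        try:
            yield
        finally:
            print('Lock "%s" released :: held %.3fs' % (name, time.monotonic() - t1))

with timed_lock('compute_resources'):
    pass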
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.520941] env[62684]: DEBUG nova.compute.manager [req-7c8ed60a-d413-4da3-b0b3-a51a82ae683c req-2af08002-5e68-42fe-915d-d8dabac6f9cc service nova] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Received event network-vif-plugged-d01b5a43-4c06-4869-b3d4-b610699f6bb1 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2060.521194] env[62684]: DEBUG oslo_concurrency.lockutils [req-7c8ed60a-d413-4da3-b0b3-a51a82ae683c req-2af08002-5e68-42fe-915d-d8dabac6f9cc service nova] Acquiring lock "845b2e2a-cee0-4598-afbd-1f07aa52468f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2060.521447] env[62684]: DEBUG oslo_concurrency.lockutils [req-7c8ed60a-d413-4da3-b0b3-a51a82ae683c req-2af08002-5e68-42fe-915d-d8dabac6f9cc service nova] Lock "845b2e2a-cee0-4598-afbd-1f07aa52468f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2060.521625] env[62684]: DEBUG oslo_concurrency.lockutils [req-7c8ed60a-d413-4da3-b0b3-a51a82ae683c req-2af08002-5e68-42fe-915d-d8dabac6f9cc service nova] Lock "845b2e2a-cee0-4598-afbd-1f07aa52468f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2060.521798] env[62684]: DEBUG nova.compute.manager [req-7c8ed60a-d413-4da3-b0b3-a51a82ae683c req-2af08002-5e68-42fe-915d-d8dabac6f9cc service nova] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] No waiting events found dispatching network-vif-plugged-d01b5a43-4c06-4869-b3d4-b610699f6bb1 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2060.521970] env[62684]: WARNING nova.compute.manager [req-7c8ed60a-d413-4da3-b0b3-a51a82ae683c req-2af08002-5e68-42fe-915d-d8dabac6f9cc service nova] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Received unexpected event network-vif-plugged-d01b5a43-4c06-4869-b3d4-b610699f6bb1 for instance with vm_state building and task_state spawning. [ 2060.522162] env[62684]: DEBUG nova.compute.manager [req-7c8ed60a-d413-4da3-b0b3-a51a82ae683c req-2af08002-5e68-42fe-915d-d8dabac6f9cc service nova] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Received event network-changed-d01b5a43-4c06-4869-b3d4-b610699f6bb1 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2060.522322] env[62684]: DEBUG nova.compute.manager [req-7c8ed60a-d413-4da3-b0b3-a51a82ae683c req-2af08002-5e68-42fe-915d-d8dabac6f9cc service nova] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Refreshing instance network info cache due to event network-changed-d01b5a43-4c06-4869-b3d4-b610699f6bb1. 
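[editor's note] The req-7c8ed60a records above show Neutron's network-vif-plugged event arriving before anything has registered to wait for it: pop_instance_event finds no waiter, the manager logs "Received unexpected event ... for instance with vm_state building and task_state spawning", and then falls through to refreshing the network info cache. A toy version of that register/pop handshake (threading.Event-based; names are illustrative, not Nova's own):

import threading

# Toy version of the prepare/pop handshake behind the "No waiting events
# found dispatching ..." message: events are only delivered to callers that
# registered for them first.
_waiters = {}   # (instance_uuid, event_name) -> threading.Event

def prepare_for_event(instance_uuid, event_name):
    ev = threading.Event()
    _waiters[(instance_uuid, event_name)] = ev
    return ev

def pop_instance_event(instance_uuid, event_name):
    ev = _waiters.pop((instance_uuid, event_name), None)
    if ev is None:
        print('Received unexpected event %s for %s' % (event_name, instance_uuid))
        return False
    ev.set()
    return True

# The event arrives before anyone registered, so it is treated as unexpected.
pop_instance_event('845b2e2a-cee0-4598-afbd-1f07aa52468f',
                   'network-vif-plugged-d01b5a43-4c06-4869-b3d4-b610699f6bb1')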
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2060.522510] env[62684]: DEBUG oslo_concurrency.lockutils [req-7c8ed60a-d413-4da3-b0b3-a51a82ae683c req-2af08002-5e68-42fe-915d-d8dabac6f9cc service nova] Acquiring lock "refresh_cache-845b2e2a-cee0-4598-afbd-1f07aa52468f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2060.522651] env[62684]: DEBUG oslo_concurrency.lockutils [req-7c8ed60a-d413-4da3-b0b3-a51a82ae683c req-2af08002-5e68-42fe-915d-d8dabac6f9cc service nova] Acquired lock "refresh_cache-845b2e2a-cee0-4598-afbd-1f07aa52468f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2060.522813] env[62684]: DEBUG nova.network.neutron [req-7c8ed60a-d413-4da3-b0b3-a51a82ae683c req-2af08002-5e68-42fe-915d-d8dabac6f9cc service nova] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Refreshing network info cache for port d01b5a43-4c06-4869-b3d4-b610699f6bb1 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2060.536669] env[62684]: DEBUG oslo_concurrency.lockutils [None req-709a49fe-5cc0-431c-824e-fb0efee4ec4a tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.799s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2060.552107] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053167, 'name': CreateVM_Task, 'duration_secs': 0.354248} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2060.552271] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2060.552693] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2060.552857] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2060.553202] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2060.553742] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b74a1001-0a30-444f-94ea-6d86b8d1eb8e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.558700] env[62684]: DEBUG 
oslo_vmware.api [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2060.558700] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521d035d-0ba1-91f7-300c-e8d3a5e72b8d" [ 2060.558700] env[62684]: _type = "Task" [ 2060.558700] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2060.566520] env[62684]: DEBUG oslo_vmware.api [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521d035d-0ba1-91f7-300c-e8d3a5e72b8d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.577918] env[62684]: DEBUG nova.compute.manager [req-b3cf8402-d9be-442b-8673-bf192f4c346b req-7f23a3be-d3d5-4cc0-ab35-4ab6a22279b8 service nova] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Received event network-changed-2fce07b0-060c-45c8-8466-125ebacf17b8 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2060.578200] env[62684]: DEBUG nova.compute.manager [req-b3cf8402-d9be-442b-8673-bf192f4c346b req-7f23a3be-d3d5-4cc0-ab35-4ab6a22279b8 service nova] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Refreshing instance network info cache due to event network-changed-2fce07b0-060c-45c8-8466-125ebacf17b8. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2060.578405] env[62684]: DEBUG oslo_concurrency.lockutils [req-b3cf8402-d9be-442b-8673-bf192f4c346b req-7f23a3be-d3d5-4cc0-ab35-4ab6a22279b8 service nova] Acquiring lock "refresh_cache-42ae6edd-e1f5-4ef8-a248-8f02e94d798e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2060.578530] env[62684]: DEBUG oslo_concurrency.lockutils [req-b3cf8402-d9be-442b-8673-bf192f4c346b req-7f23a3be-d3d5-4cc0-ab35-4ab6a22279b8 service nova] Acquired lock "refresh_cache-42ae6edd-e1f5-4ef8-a248-8f02e94d798e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2060.578695] env[62684]: DEBUG nova.network.neutron [req-b3cf8402-d9be-442b-8673-bf192f4c346b req-7f23a3be-d3d5-4cc0-ab35-4ab6a22279b8 service nova] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Refreshing network info cache for port 2fce07b0-060c-45c8-8466-125ebacf17b8 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2060.715467] env[62684]: DEBUG oslo_vmware.api [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053163, 'name': RemoveSnapshot_Task, 'duration_secs': 0.706685} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2060.715632] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Deleted Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2060.722022] env[62684]: INFO nova.compute.manager [-] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Took 1.69 seconds to deallocate network for instance. [ 2060.734456] env[62684]: DEBUG oslo_vmware.api [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053168, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.772486] env[62684]: DEBUG nova.compute.utils [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2060.775267] env[62684]: DEBUG nova.compute.manager [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2060.775373] env[62684]: DEBUG nova.network.neutron [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2060.815244] env[62684]: DEBUG nova.policy [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a3800d71923848db8635de9a8a2ff9f6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '76d88ac878d44480b3b54b24ab87efa9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2060.850356] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053169, 'name': CreateVM_Task, 'duration_secs': 0.40422} completed successfully. 
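[editor's note] The nova.policy line above shows the network:attach_external_network policy being evaluated against the request's credential dict and failing; the caller only carries member/reader roles, and that rule is typically restricted to admin callers. A minimal stand-in for that kind of role-based rule check (illustrative only, not oslo.policy's rule language):

# Minimal stand-in for the role-based policy check logged above. The rule
# table and admin requirement are assumptions for illustration, not the
# deployment's actual policy file.
RULES = {'network:attach_external_network': {'admin'}}

def check(rule, creds):
    required = RULES.get(rule, set())
    return bool(required & set(creds.get('roles', [])))

creds = {'roles': ['member', 'reader'],
         'project_id': '76d88ac878d44480b3b54b24ab87efa9'}
print(check('network:attach_external_network', creds))   # False -> check fails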
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2060.850548] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2060.851301] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2061.008707] env[62684]: DEBUG oslo_vmware.api [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c7642c-e09c-d754-f1a5-20ed4e5fb096, 'name': SearchDatastore_Task, 'duration_secs': 0.027825} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.009008] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2061.009261] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2061.009536] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2061.009690] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2061.009878] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2061.010190] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2061.010505] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2061.010742] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b78e94be-5882-4d16-a1a6-4a144f0b8aec {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.012949] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5333b856-e986-4abe-8c21-35e02aed7255 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.017795] env[62684]: DEBUG oslo_vmware.api [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2061.017795] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d7888c-6e61-c420-3861-f5f80edafbe6" [ 2061.017795] env[62684]: _type = "Task" [ 2061.017795] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.025784] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2061.025784] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2061.030142] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-865fde38-bbca-4844-a241-b7d67c71a896 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.032479] env[62684]: DEBUG oslo_vmware.api [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d7888c-6e61-c420-3861-f5f80edafbe6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.035829] env[62684]: DEBUG oslo_vmware.api [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2061.035829] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5284848b-2516-4928-d9df-97d1554ead96" [ 2061.035829] env[62684]: _type = "Task" [ 2061.035829] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.044188] env[62684]: DEBUG oslo_vmware.api [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5284848b-2516-4928-d9df-97d1554ead96, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.070537] env[62684]: DEBUG oslo_vmware.api [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521d035d-0ba1-91f7-300c-e8d3a5e72b8d, 'name': SearchDatastore_Task, 'duration_secs': 0.028433} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.070894] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2061.071178] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2061.071478] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2061.071564] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2061.071743] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2061.072026] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb76b8bb-6b09-48c5-a9a8-b9f6c2322272 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.086953] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Created directory with path [datastore2] devstack-image-cache_base 
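[editor's note] The sequence above (acquire the per-image lock on devstack-image-cache_base/<image id>.vmdk, SearchDatastore for it, create the cache directory if needed, and copy only when the cached disk is missing) is a cache-if-missing pattern keyed on the image id. A compact sketch of that pattern with placeholder search/fetch callables (not datastore calls):

import threading

_image_locks = {}

# Compact sketch of the cache-if-missing flow seen above: lock the cache
# entry, check whether the cached VMDK already exists, and fetch it only
# when it does not. search() and fetch() are placeholders.
def ensure_cached_image(image_id, search, fetch):
    lock = _image_locks.setdefault(image_id, threading.Lock())
    path = 'devstack-image-cache_base/%s/%s.vmdk' % (image_id, image_id)
    with lock:
        if not search(path):          # analogous to SearchDatastore_Task
            fetch(path)               # analogous to the CopyVirtualDisk step
    return path

cache = set()
ensure_cached_image('3931321c-cb4c-4b87-8d3a-50e05ea01db2',
                    search=lambda p: p in cache,
                    fetch=cache.add)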
{{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2061.087294] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2061.088827] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40a86275-a9ca-4648-8d7b-e4913e7a86d7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.095222] env[62684]: DEBUG oslo_vmware.api [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2061.095222] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]525c5b87-97e1-a3a3-30af-78e5d2583226" [ 2061.095222] env[62684]: _type = "Task" [ 2061.095222] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.105177] env[62684]: DEBUG oslo_vmware.api [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]525c5b87-97e1-a3a3-30af-78e5d2583226, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.158897] env[62684]: DEBUG nova.network.neutron [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Successfully created port: b5e1d42c-c9c1-4c43-83b7-81eac7065383 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2061.222235] env[62684]: WARNING nova.compute.manager [None req-3a5ef782-2266-43e7-b8ee-7948bc109898 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Image not found during snapshot: nova.exception.ImageNotFound: Image c913a438-5830-4a5b-bfe5-58d8f4249780 could not be found. [ 2061.231330] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2061.239336] env[62684]: DEBUG oslo_vmware.api [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053168, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.278489] env[62684]: DEBUG nova.compute.manager [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2061.329947] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2061.330273] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2061.330490] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2061.330677] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2061.330851] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2061.332867] env[62684]: INFO nova.compute.manager [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Terminating instance [ 2061.335167] env[62684]: DEBUG nova.compute.manager [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2061.335373] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2061.336777] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd78a99d-9f73-4ecd-a01d-ef3ab4b845af {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.349141] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2061.350093] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b481c37d-f5a7-4d71-bd95-5dc4940f786c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.356561] env[62684]: DEBUG oslo_vmware.api [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2061.356561] env[62684]: value = "task-2053170" [ 2061.356561] env[62684]: _type = "Task" [ 2061.356561] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.370783] env[62684]: DEBUG oslo_vmware.api [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053170, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.531823] env[62684]: DEBUG oslo_vmware.api [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d7888c-6e61-c420-3861-f5f80edafbe6, 'name': SearchDatastore_Task, 'duration_secs': 0.011078} completed successfully. 
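[editor's note] The terminate_instance records above follow a fixed order: take the instance and instance-events locks, start destroying on the hypervisor, power the VM off (PowerOffVM_Task), and only afterwards remove the VM and give back its resources. A schematic outline of that ordering with placeholder steps (not the nova.compute.manager implementation):

# Schematic outline of the shutdown ordering shown in the log; each step is
# a placeholder callable supplied by the caller.
def terminate_instance(power_off, destroy_vm, deallocate_network, update_usage):
    power_off()            # PowerOffVM_Task must finish before the VM is removed
    destroy_vm()           # unregister and delete the VM from vCenter
    deallocate_network()   # drop the neutron ports / network allocations
    update_usage()         # return the claimed resources to the tracker

terminate_instance(lambda: print('power off'),
                   lambda: print('destroy'),
                   lambda: print('deallocate network'),
                   lambda: print('update usage'))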
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.532031] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2061.532292] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2061.532522] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2061.547970] env[62684]: DEBUG oslo_vmware.api [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5284848b-2516-4928-d9df-97d1554ead96, 'name': SearchDatastore_Task, 'duration_secs': 0.025978} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.548794] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d1f9ef3-f7e4-4e61-abb3-9f0919c702b4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.556558] env[62684]: DEBUG oslo_vmware.api [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2061.556558] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5295ef58-ed06-a17d-df25-1e59ecf82c91" [ 2061.556558] env[62684]: _type = "Task" [ 2061.556558] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.565750] env[62684]: DEBUG oslo_vmware.api [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5295ef58-ed06-a17d-df25-1e59ecf82c91, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.597075] env[62684]: DEBUG nova.network.neutron [req-7c8ed60a-d413-4da3-b0b3-a51a82ae683c req-2af08002-5e68-42fe-915d-d8dabac6f9cc service nova] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Updated VIF entry in instance network info cache for port d01b5a43-4c06-4869-b3d4-b610699f6bb1. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2061.597570] env[62684]: DEBUG nova.network.neutron [req-7c8ed60a-d413-4da3-b0b3-a51a82ae683c req-2af08002-5e68-42fe-915d-d8dabac6f9cc service nova] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Updating instance_info_cache with network_info: [{"id": "d01b5a43-4c06-4869-b3d4-b610699f6bb1", "address": "fa:16:3e:11:19:a8", "network": {"id": "eb5671f2-a648-452e-a4c0-e81eb932b49c", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-524881696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "359bcaa2eeb64bcbb6602062777b852e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "001929c7-0dc4-4b73-a9f1-d672f8377985", "external-id": "nsx-vlan-transportzone-230", "segmentation_id": 230, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd01b5a43-4c", "ovs_interfaceid": "d01b5a43-4c06-4869-b3d4-b610699f6bb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2061.607754] env[62684]: DEBUG oslo_vmware.api [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]525c5b87-97e1-a3a3-30af-78e5d2583226, 'name': SearchDatastore_Task, 'duration_secs': 0.024177} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.608639] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71ffcb52-38c2-4d26-9824-aa663fe86ec3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.612578] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39f3322b-4667-428d-a7a2-ffc766479968 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.618422] env[62684]: DEBUG oslo_vmware.api [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2061.618422] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52afe88a-14fa-7f3a-6577-0a908becbb1f" [ 2061.618422] env[62684]: _type = "Task" [ 2061.618422] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.623786] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee7dc7f3-b874-4aa3-b874-fc3e82960a9c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.631637] env[62684]: DEBUG oslo_vmware.api [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52afe88a-14fa-7f3a-6577-0a908becbb1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.656747] env[62684]: DEBUG nova.network.neutron [req-b3cf8402-d9be-442b-8673-bf192f4c346b req-7f23a3be-d3d5-4cc0-ab35-4ab6a22279b8 service nova] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Updated VIF entry in instance network info cache for port 2fce07b0-060c-45c8-8466-125ebacf17b8. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2061.657173] env[62684]: DEBUG nova.network.neutron [req-b3cf8402-d9be-442b-8673-bf192f4c346b req-7f23a3be-d3d5-4cc0-ab35-4ab6a22279b8 service nova] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Updating instance_info_cache with network_info: [{"id": "2fce07b0-060c-45c8-8466-125ebacf17b8", "address": "fa:16:3e:ed:e7:d2", "network": {"id": "aa52badb-0b73-48bc-afaa-5e06a97d5c7d", "bridge": "br-int", "label": "tempest-ServersTestJSON-556342067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c54f74085f343d2b790145b0d82a9f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2fce07b0-06", "ovs_interfaceid": "2fce07b0-060c-45c8-8466-125ebacf17b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2061.659609] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee90cff1-5e40-4967-a510-22ae99b4c223 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.667971] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8031db2f-1c28-4adb-92d5-6574517f587e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.682028] env[62684]: DEBUG nova.compute.provider_tree [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f 
{{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2061.735756] env[62684]: DEBUG oslo_vmware.api [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053168, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.019485} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.736042] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2061.737038] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f779bcd-ffb8-455b-bc0d-468ee4199a05 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.758518] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] 9964237b-db9b-49cc-a9bd-d62329ea564e/9964237b-db9b-49cc-a9bd-d62329ea564e.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2061.759191] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c051498-6709-44d1-92fe-5ca91038c985 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.779175] env[62684]: DEBUG oslo_vmware.api [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2061.779175] env[62684]: value = "task-2053171" [ 2061.779175] env[62684]: _type = "Task" [ 2061.779175] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.787306] env[62684]: DEBUG oslo_vmware.api [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053171, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.868697] env[62684]: DEBUG oslo_vmware.api [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053170, 'name': PowerOffVM_Task, 'duration_secs': 0.243776} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.868990] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2061.869195] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2061.869462] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-29fc0a41-d614-4004-8157-72c4e4b96eb7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.896368] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "41da0c18-dd9c-49bb-8b0d-a907575ee22e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2061.896670] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "41da0c18-dd9c-49bb-8b0d-a907575ee22e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2061.896894] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "41da0c18-dd9c-49bb-8b0d-a907575ee22e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2061.897187] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "41da0c18-dd9c-49bb-8b0d-a907575ee22e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2061.897397] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "41da0c18-dd9c-49bb-8b0d-a907575ee22e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2061.899722] env[62684]: INFO nova.compute.manager [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 
41da0c18-dd9c-49bb-8b0d-a907575ee22e] Terminating instance [ 2061.901569] env[62684]: DEBUG nova.compute.manager [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2061.901761] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2061.902588] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86c9b626-dbc3-48e3-99ea-7ab0dcf9eb5d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.909981] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2061.910230] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-56d3cc30-d157-4ea7-a29b-31d310d3ef87 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.916007] env[62684]: DEBUG oslo_vmware.api [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 2061.916007] env[62684]: value = "task-2053173" [ 2061.916007] env[62684]: _type = "Task" [ 2061.916007] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.923767] env[62684]: DEBUG oslo_vmware.api [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053173, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.964104] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2061.964554] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2061.964943] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Deleting the datastore file [datastore2] b4cd871a-30ea-4b7a-98ad-00b8676dc2cd {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2061.965334] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-882f3ec9-cda8-4e74-be16-dfcd4378fff0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.972286] env[62684]: DEBUG oslo_vmware.api [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2061.972286] env[62684]: value = "task-2053174" [ 2061.972286] env[62684]: _type = "Task" [ 2061.972286] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.980288] env[62684]: DEBUG oslo_vmware.api [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053174, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.072033] env[62684]: DEBUG oslo_vmware.api [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5295ef58-ed06-a17d-df25-1e59ecf82c91, 'name': SearchDatastore_Task, 'duration_secs': 0.02814} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.072592] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2062.073082] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 845b2e2a-cee0-4598-afbd-1f07aa52468f/845b2e2a-cee0-4598-afbd-1f07aa52468f.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2062.073586] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2062.073961] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2062.074353] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f095aea9-c8c5-4c6d-8711-892c688fdf22 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.078334] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b0dcb590-3ef9-46e3-a1a1-4a0500508828 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.087416] env[62684]: DEBUG oslo_vmware.api [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2062.087416] env[62684]: value = "task-2053175" [ 2062.087416] env[62684]: _type = "Task" [ 2062.087416] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.092149] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2062.092468] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2062.093539] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f60fe2c5-a11f-4aa4-88a8-f9303c540620 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.099300] env[62684]: DEBUG oslo_vmware.api [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053175, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.102877] env[62684]: DEBUG oslo_concurrency.lockutils [req-7c8ed60a-d413-4da3-b0b3-a51a82ae683c req-2af08002-5e68-42fe-915d-d8dabac6f9cc service nova] Releasing lock "refresh_cache-845b2e2a-cee0-4598-afbd-1f07aa52468f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2062.103239] env[62684]: DEBUG nova.compute.manager [req-7c8ed60a-d413-4da3-b0b3-a51a82ae683c req-2af08002-5e68-42fe-915d-d8dabac6f9cc service nova] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Received event network-vif-deleted-d15c8cc5-3728-42eb-8a3a-e5b3390bf5dd {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2062.103682] env[62684]: DEBUG oslo_vmware.api [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2062.103682] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5285334a-3ba0-a626-1cb4-11346f2eb2fd" [ 2062.103682] env[62684]: _type = "Task" [ 2062.103682] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.111526] env[62684]: DEBUG oslo_vmware.api [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5285334a-3ba0-a626-1cb4-11346f2eb2fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.129381] env[62684]: DEBUG oslo_vmware.api [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52afe88a-14fa-7f3a-6577-0a908becbb1f, 'name': SearchDatastore_Task, 'duration_secs': 0.026831} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.129658] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2062.129922] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 2f8f7e02-54fb-4275-badb-35c0b840ab33/2f8f7e02-54fb-4275-badb-35c0b840ab33.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2062.130212] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-de4ee5b3-9429-485c-9f42-a14cc2e3d558 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.135930] env[62684]: DEBUG oslo_vmware.api [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2062.135930] env[62684]: value = "task-2053176" [ 2062.135930] env[62684]: _type = "Task" [ 2062.135930] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.143378] env[62684]: DEBUG oslo_vmware.api [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053176, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.160146] env[62684]: DEBUG oslo_concurrency.lockutils [req-b3cf8402-d9be-442b-8673-bf192f4c346b req-7f23a3be-d3d5-4cc0-ab35-4ab6a22279b8 service nova] Releasing lock "refresh_cache-42ae6edd-e1f5-4ef8-a248-8f02e94d798e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2062.185597] env[62684]: DEBUG nova.scheduler.client.report [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2062.290081] env[62684]: DEBUG oslo_vmware.api [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053171, 'name': ReconfigVM_Task, 'duration_secs': 0.282512} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.290081] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Reconfigured VM instance instance-0000004a to attach disk [datastore2] 9964237b-db9b-49cc-a9bd-d62329ea564e/9964237b-db9b-49cc-a9bd-d62329ea564e.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2062.290542] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-75ffcafe-b6cd-40f8-a051-53f6a373c31f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.296108] env[62684]: DEBUG nova.compute.manager [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2062.300209] env[62684]: DEBUG oslo_vmware.api [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2062.300209] env[62684]: value = "task-2053177" [ 2062.300209] env[62684]: _type = "Task" [ 2062.300209] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.312484] env[62684]: DEBUG oslo_vmware.api [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053177, 'name': Rename_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.330950] env[62684]: DEBUG nova.virt.hardware [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2062.331340] env[62684]: DEBUG nova.virt.hardware [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2062.331612] env[62684]: DEBUG nova.virt.hardware [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2062.331832] env[62684]: DEBUG nova.virt.hardware [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2062.331986] env[62684]: DEBUG nova.virt.hardware [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2062.332169] env[62684]: DEBUG nova.virt.hardware [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2062.332401] env[62684]: DEBUG nova.virt.hardware [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2062.332586] env[62684]: DEBUG nova.virt.hardware [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
2062.332806] env[62684]: DEBUG nova.virt.hardware [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2062.333465] env[62684]: DEBUG nova.virt.hardware [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2062.333465] env[62684]: DEBUG nova.virt.hardware [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2062.334720] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1741345c-d38e-40b9-a64e-220ebe1b933c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.344174] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f98c5aa-9972-492c-b424-e96abc6e0c17 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.426339] env[62684]: DEBUG oslo_vmware.api [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053173, 'name': PowerOffVM_Task, 'duration_secs': 0.192583} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.426710] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2062.426912] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2062.427250] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b853671-8a25-4fa5-82c1-4fd6517376ef {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.484840] env[62684]: DEBUG oslo_vmware.api [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053174, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138557} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.485274] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2062.485527] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2062.486125] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2062.486238] env[62684]: INFO nova.compute.manager [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Took 1.15 seconds to destroy the instance on the hypervisor. [ 2062.486690] env[62684]: DEBUG oslo.service.loopingcall [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2062.487183] env[62684]: DEBUG nova.compute.manager [-] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2062.487311] env[62684]: DEBUG nova.network.neutron [-] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2062.585920] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2062.586197] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2062.586395] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Deleting the datastore file [datastore1] 41da0c18-dd9c-49bb-8b0d-a907575ee22e {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2062.586881] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-12b6917f-d2cf-4edf-8434-9f40ff7854b6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.598277] env[62684]: DEBUG oslo_vmware.api [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053175, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.600914] env[62684]: DEBUG oslo_vmware.api [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for the task: (returnval){ [ 2062.600914] env[62684]: value = "task-2053179" [ 2062.600914] env[62684]: _type = "Task" [ 2062.600914] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.614108] env[62684]: DEBUG oslo_vmware.api [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053179, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.619203] env[62684]: DEBUG oslo_vmware.api [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5285334a-3ba0-a626-1cb4-11346f2eb2fd, 'name': SearchDatastore_Task, 'duration_secs': 0.018041} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.620382] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea3004d4-aa72-4fb6-bc6a-6fbd76916cd2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.626806] env[62684]: DEBUG oslo_vmware.api [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2062.626806] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5224a51d-1c50-d307-f76c-d45292787774" [ 2062.626806] env[62684]: _type = "Task" [ 2062.626806] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.635734] env[62684]: DEBUG oslo_vmware.api [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5224a51d-1c50-d307-f76c-d45292787774, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.639411] env[62684]: DEBUG nova.compute.manager [req-59daadd5-51f2-4312-bd89-62cb9eaa456c req-e02a5f90-c9b8-4e27-8fa1-243c8c228826 service nova] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Received event network-vif-plugged-b5e1d42c-c9c1-4c43-83b7-81eac7065383 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2062.639859] env[62684]: DEBUG oslo_concurrency.lockutils [req-59daadd5-51f2-4312-bd89-62cb9eaa456c req-e02a5f90-c9b8-4e27-8fa1-243c8c228826 service nova] Acquiring lock "983218ac-7cf3-48ef-88d8-aa9e9322df4b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2062.640036] env[62684]: DEBUG oslo_concurrency.lockutils [req-59daadd5-51f2-4312-bd89-62cb9eaa456c req-e02a5f90-c9b8-4e27-8fa1-243c8c228826 service nova] Lock "983218ac-7cf3-48ef-88d8-aa9e9322df4b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2062.640169] env[62684]: DEBUG oslo_concurrency.lockutils [req-59daadd5-51f2-4312-bd89-62cb9eaa456c req-e02a5f90-c9b8-4e27-8fa1-243c8c228826 service nova] Lock "983218ac-7cf3-48ef-88d8-aa9e9322df4b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2062.640430] env[62684]: DEBUG nova.compute.manager [req-59daadd5-51f2-4312-bd89-62cb9eaa456c req-e02a5f90-c9b8-4e27-8fa1-243c8c228826 service nova] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] No waiting events found dispatching network-vif-plugged-b5e1d42c-c9c1-4c43-83b7-81eac7065383 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2062.640623] env[62684]: WARNING nova.compute.manager [req-59daadd5-51f2-4312-bd89-62cb9eaa456c req-e02a5f90-c9b8-4e27-8fa1-243c8c228826 service nova] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Received unexpected event network-vif-plugged-b5e1d42c-c9c1-4c43-83b7-81eac7065383 
for instance with vm_state building and task_state spawning. [ 2062.651480] env[62684]: DEBUG oslo_vmware.api [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053176, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.690781] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.425s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2062.691594] env[62684]: DEBUG nova.compute.manager [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2062.694057] env[62684]: DEBUG oslo_concurrency.lockutils [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.767s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2062.694318] env[62684]: DEBUG nova.objects.instance [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Lazy-loading 'resources' on Instance uuid feca8680-4baa-4b2c-9875-69a88b351dc0 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2062.745305] env[62684]: DEBUG nova.network.neutron [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Successfully updated port: b5e1d42c-c9c1-4c43-83b7-81eac7065383 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2062.810529] env[62684]: DEBUG oslo_vmware.api [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053177, 'name': Rename_Task, 'duration_secs': 0.227974} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.810839] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2062.811111] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-460f4f12-9c8e-4fd8-8118-14625fc95f31 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.818662] env[62684]: DEBUG oslo_vmware.api [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2062.818662] env[62684]: value = "task-2053180" [ 2062.818662] env[62684]: _type = "Task" [ 2062.818662] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.826933] env[62684]: DEBUG oslo_vmware.api [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053180, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.099794] env[62684]: DEBUG oslo_vmware.api [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053175, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.618921} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2063.100809] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 845b2e2a-cee0-4598-afbd-1f07aa52468f/845b2e2a-cee0-4598-afbd-1f07aa52468f.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2063.100809] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2063.101167] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1560b216-6682-4cc5-aa37-4fc6245138d4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.117353] env[62684]: DEBUG oslo_vmware.api [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Task: {'id': task-2053179, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.415719} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2063.118831] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2063.119201] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2063.119641] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2063.120028] env[62684]: INFO nova.compute.manager [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Took 1.22 seconds to destroy the instance on the hypervisor. [ 2063.120425] env[62684]: DEBUG oslo.service.loopingcall [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2063.123260] env[62684]: DEBUG oslo_vmware.api [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2063.123260] env[62684]: value = "task-2053181" [ 2063.123260] env[62684]: _type = "Task" [ 2063.123260] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2063.123260] env[62684]: DEBUG nova.compute.manager [-] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2063.123260] env[62684]: DEBUG nova.network.neutron [-] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2063.135605] env[62684]: DEBUG oslo_vmware.api [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053181, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.146715] env[62684]: DEBUG oslo_vmware.api [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5224a51d-1c50-d307-f76c-d45292787774, 'name': SearchDatastore_Task, 'duration_secs': 0.052819} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2063.147313] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2063.147723] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 42ae6edd-e1f5-4ef8-a248-8f02e94d798e/42ae6edd-e1f5-4ef8-a248-8f02e94d798e.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2063.148319] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e9d0b387-8828-42a6-8c0e-98c7adb352be {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.154399] env[62684]: DEBUG oslo_vmware.api [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053176, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.606456} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2063.155652] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 2f8f7e02-54fb-4275-badb-35c0b840ab33/2f8f7e02-54fb-4275-badb-35c0b840ab33.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2063.155652] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2063.155871] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ccfe6a64-c27a-4d16-93e6-3b2d5cfbb637 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.159390] env[62684]: DEBUG oslo_vmware.api [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2063.159390] env[62684]: value = "task-2053182" [ 2063.159390] env[62684]: _type = "Task" [ 2063.159390] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2063.164441] env[62684]: DEBUG oslo_vmware.api [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2063.164441] env[62684]: value = "task-2053183" [ 2063.164441] env[62684]: _type = "Task" [ 2063.164441] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2063.173990] env[62684]: DEBUG nova.compute.manager [req-b4549ff8-baab-47aa-a3e3-8a7fa5622176 req-9a24c53c-1008-4ee4-a7df-3c54e887b5ab service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Received event network-vif-deleted-8be48385-37eb-4c2e-baf8-404a9aad87de {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2063.174297] env[62684]: INFO nova.compute.manager [req-b4549ff8-baab-47aa-a3e3-8a7fa5622176 req-9a24c53c-1008-4ee4-a7df-3c54e887b5ab service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Neutron deleted interface 8be48385-37eb-4c2e-baf8-404a9aad87de; detaching it from the instance and deleting it from the info cache [ 2063.174552] env[62684]: DEBUG nova.network.neutron [req-b4549ff8-baab-47aa-a3e3-8a7fa5622176 req-9a24c53c-1008-4ee4-a7df-3c54e887b5ab service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2063.176395] env[62684]: DEBUG oslo_vmware.api [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053182, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.181837] env[62684]: DEBUG oslo_vmware.api [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053183, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.197288] env[62684]: DEBUG nova.compute.utils [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2063.203855] env[62684]: DEBUG nova.compute.manager [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2063.203855] env[62684]: DEBUG nova.network.neutron [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2063.245711] env[62684]: DEBUG nova.policy [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dd099e9982464c4c9fdf101959f7ace5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '200eebbae628463b9decddf3546b18df', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2063.247479] env[62684]: DEBUG oslo_concurrency.lockutils [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "refresh_cache-983218ac-7cf3-48ef-88d8-aa9e9322df4b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2063.247639] env[62684]: DEBUG oslo_concurrency.lockutils [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired lock "refresh_cache-983218ac-7cf3-48ef-88d8-aa9e9322df4b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2063.247768] env[62684]: DEBUG nova.network.neutron [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2063.331323] env[62684]: DEBUG oslo_vmware.api [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053180, 'name': PowerOnVM_Task, 'duration_secs': 0.464893} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2063.331650] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2063.331886] env[62684]: INFO nova.compute.manager [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Took 5.85 seconds to spawn the instance on the hypervisor. 
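Annotation (editor's note, not part of the log): the entries above repeatedly show oslo.vmware's task-handling pattern, where a vCenter task (ExtendVirtualDisk_Task, CopyVirtualDisk_Task, PowerOnVM_Task, ...) is created and then polled via wait_for_task/_poll_task until it reports success, with "progress is N%" lines emitted along the way. The following is a minimal, self-contained sketch of that polling idea only; `fetch_task_info` and `TaskInfo` are hypothetical stand-ins, not the oslo.vmware API.

```python
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    """Hypothetical stand-in for the vSphere TaskInfo read back on each poll."""
    state: str          # "queued" | "running" | "success" | "error"
    progress: int = 0   # percent complete, as in the log's "progress is 0%"
    error: str = ""     # error message when state == "error"


def wait_for_task(fetch_task_info, task_id, interval=0.5, timeout=300.0):
    """Poll a task until it finishes, mirroring the logged sequence
    'Waiting for the task ... to complete' -> 'progress is N%' ->
    'completed successfully'. `fetch_task_info(task_id)` is a
    caller-supplied function returning a TaskInfo."""
    deadline = time.monotonic() + timeout
    while True:
        info = fetch_task_info(task_id)
        if info.state == "success":
            print(f"Task {task_id} completed successfully")
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.error}")
        print(f"Task {task_id} progress is {info.progress}%")
        if time.monotonic() > deadline:
            raise TimeoutError(f"Task {task_id} did not finish in {timeout}s")
        time.sleep(interval)
```

In the log this loop runs concurrently for several instances at once (task-2053181 through task-2053186 are all in flight in the same second), which is why progress lines from different requests interleave.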
[ 2063.332043] env[62684]: DEBUG nova.compute.manager [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2063.332918] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a0f4f07-2a49-42d5-82ef-8581a7017422 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.552156] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ebf9b94-62a7-4ccf-ab38-644d8cdfca46 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.560212] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dda523e-53a5-47bc-a649-2d6320bc843e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.595437] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1313831-3f9f-484f-adb8-f00c8f1886b1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.603684] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8839c24f-f9d9-4fdb-a317-8a9e9aa6d51d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.618506] env[62684]: DEBUG nova.compute.provider_tree [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2063.631957] env[62684]: DEBUG oslo_vmware.api [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053181, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082903} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2063.633466] env[62684]: DEBUG nova.network.neutron [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Successfully created port: 868206c0-a9c5-4e53-a798-830df2d0c619 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2063.635769] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2063.637267] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e82a33f-c312-4cfb-8dff-1cb347ac4262 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.651538] env[62684]: DEBUG nova.network.neutron [-] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2063.661030] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] 845b2e2a-cee0-4598-afbd-1f07aa52468f/845b2e2a-cee0-4598-afbd-1f07aa52468f.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2063.662082] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d38b49b9-75dd-41f4-814b-f77795fb5521 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.682182] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a5aff532-75e3-470f-ad19-de3097edbb23 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.686315] env[62684]: DEBUG oslo_vmware.api [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2063.686315] env[62684]: value = "task-2053184" [ 2063.686315] env[62684]: _type = "Task" [ 2063.686315] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2063.689879] env[62684]: DEBUG oslo_vmware.api [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053182, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.492569} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2063.695473] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 42ae6edd-e1f5-4ef8-a248-8f02e94d798e/42ae6edd-e1f5-4ef8-a248-8f02e94d798e.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2063.695696] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2063.695942] env[62684]: DEBUG oslo_vmware.api [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053183, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.188226} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2063.696448] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9d52b2df-4235-40ab-a7a6-20fa48e5fe11 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.698334] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2063.701013] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5873dade-7d05-42d0-8044-f07a2d1db7c4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.711676] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa9deeb-ef4e-42f0-a08f-94c38f74b0d6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.715892] env[62684]: DEBUG nova.compute.manager [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2063.721634] env[62684]: DEBUG oslo_vmware.api [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053184, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.739788] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] 2f8f7e02-54fb-4275-badb-35c0b840ab33/2f8f7e02-54fb-4275-badb-35c0b840ab33.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2063.751210] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28f12963-51d3-47ce-a58f-cce49a62e909 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.765503] env[62684]: DEBUG oslo_vmware.api [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2063.765503] env[62684]: value = "task-2053185" [ 2063.765503] env[62684]: _type = "Task" [ 2063.765503] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2063.767081] env[62684]: DEBUG nova.compute.manager [req-b4549ff8-baab-47aa-a3e3-8a7fa5622176 req-9a24c53c-1008-4ee4-a7df-3c54e887b5ab service nova] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Detach interface failed, port_id=8be48385-37eb-4c2e-baf8-404a9aad87de, reason: Instance b4cd871a-30ea-4b7a-98ad-00b8676dc2cd could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2063.774305] env[62684]: DEBUG oslo_vmware.api [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2063.774305] env[62684]: value = "task-2053186" [ 2063.774305] env[62684]: _type = "Task" [ 2063.774305] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2063.776995] env[62684]: DEBUG oslo_vmware.api [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053185, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.784700] env[62684]: DEBUG oslo_vmware.api [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053186, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.799178] env[62684]: DEBUG nova.network.neutron [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2063.853609] env[62684]: INFO nova.compute.manager [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Took 11.88 seconds to build instance. [ 2063.920811] env[62684]: DEBUG nova.network.neutron [-] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2063.950621] env[62684]: DEBUG nova.network.neutron [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Updating instance_info_cache with network_info: [{"id": "b5e1d42c-c9c1-4c43-83b7-81eac7065383", "address": "fa:16:3e:6c:54:30", "network": {"id": "7678b347-6a54-4b84-9a4d-b566bbeb1ea4", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-51664912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d88ac878d44480b3b54b24ab87efa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5e1d42c-c9", "ovs_interfaceid": "b5e1d42c-c9c1-4c43-83b7-81eac7065383", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2064.141251] env[62684]: ERROR nova.scheduler.client.report [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] [req-a07d5ac3-1248-4c1f-89e2-b58ec974a934] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a07d5ac3-1248-4c1f-89e2-b58ec974a934"}]} [ 2064.159158] env[62684]: DEBUG nova.scheduler.client.report [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2064.162432] env[62684]: INFO nova.compute.manager [-] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Took 1.68 seconds to deallocate network for instance. [ 2064.174429] env[62684]: DEBUG nova.scheduler.client.report [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2064.174670] env[62684]: DEBUG nova.compute.provider_tree [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2064.194725] env[62684]: DEBUG nova.scheduler.client.report [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2064.204501] env[62684]: DEBUG oslo_vmware.api [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053184, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.213991] env[62684]: DEBUG nova.scheduler.client.report [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2064.278934] env[62684]: DEBUG oslo_vmware.api [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053185, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.289792] env[62684]: DEBUG oslo_vmware.api [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053186, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.357703] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbfa44ec-40d6-411c-9018-677b6fcab67e tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Lock "9964237b-db9b-49cc-a9bd-d62329ea564e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.398s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2064.423697] env[62684]: INFO nova.compute.manager [-] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Took 1.30 seconds to deallocate network for instance. 
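Annotation (editor's note, not part of the log): the ERROR at 2064.141251 and the entries after it show the scheduler report client hitting a Placement 409 "placement.concurrent_update" (resource provider generation conflict), refreshing the provider's inventories, aggregates and traits, and retrying; the retry then succeeds and bumps the provider generation from 108 to 109. A rough sketch of that optimistic-concurrency loop is below; `get_inventory` and `put_inventory` are hypothetical callables standing in for the real Placement client, not Nova's report client API.

```python
# Illustrative optimistic-concurrency retry against a Placement-like API.
# get_inventory(provider_uuid) -> (generation, inventory_dict)
# put_inventory(provider_uuid, generation, inventory) -> (ok, new_generation)
# Both callables are hypothetical stand-ins for this sketch.

def set_inventory_with_retry(get_inventory, put_inventory,
                             provider_uuid, desired_inventory,
                             max_attempts=4):
    """Write inventory for a resource provider, re-reading the provider
    generation and retrying whenever the write is rejected with a
    'resource provider generation conflict' (HTTP 409)."""
    for _ in range(max_attempts):
        generation, _current = get_inventory(provider_uuid)
        ok, new_generation = put_inventory(provider_uuid, generation,
                                           desired_inventory)
        if ok:
            # Mirrors 'Updating resource provider ... generation from 108 to 109'
            return new_generation
        # Conflict: another writer bumped the generation first; refresh
        # the cached provider data and try again, as the log shows.
    raise RuntimeError(
        f"Gave up updating inventory for {provider_uuid} "
        f"after {max_attempts} generation conflicts")
```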
[ 2064.449423] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b900f185-d348-48e3-8ce9-9585bbd63150 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.453360] env[62684]: DEBUG oslo_concurrency.lockutils [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Releasing lock "refresh_cache-983218ac-7cf3-48ef-88d8-aa9e9322df4b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2064.453851] env[62684]: DEBUG nova.compute.manager [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Instance network_info: |[{"id": "b5e1d42c-c9c1-4c43-83b7-81eac7065383", "address": "fa:16:3e:6c:54:30", "network": {"id": "7678b347-6a54-4b84-9a4d-b566bbeb1ea4", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-51664912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d88ac878d44480b3b54b24ab87efa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5e1d42c-c9", "ovs_interfaceid": "b5e1d42c-c9c1-4c43-83b7-81eac7065383", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2064.454098] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6c:54:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'de5fcb06-b0d0-467f-86fe-06882165ac31', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b5e1d42c-c9c1-4c43-83b7-81eac7065383', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2064.461323] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Creating folder: Project (76d88ac878d44480b3b54b24ab87efa9). Parent ref: group-v421118. 
{{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2064.463758] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-968af62e-7696-4628-9bf4-bf038b71d220 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.466692] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e184856e-413e-49ed-a308-98c0ed25c7f1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.499016] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-559f484b-380f-4002-87b9-7a73c03163a6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.501687] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Created folder: Project (76d88ac878d44480b3b54b24ab87efa9) in parent group-v421118. [ 2064.501908] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Creating folder: Instances. Parent ref: group-v421326. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2064.502459] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-daa6a09d-2fdc-495f-aa73-ad2c066264a9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.510023] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-719db3f1-526f-4a7f-af85-b8f85a817cec {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.514445] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Created folder: Instances in parent group-v421326. [ 2064.514682] env[62684]: DEBUG oslo.service.loopingcall [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2064.515261] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2064.515567] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-38c193f6-1ea8-4679-b9e9-859513d76496 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.538375] env[62684]: DEBUG nova.compute.provider_tree [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2064.544589] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2064.544589] env[62684]: value = "task-2053189" [ 2064.544589] env[62684]: _type = "Task" [ 2064.544589] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2064.553175] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053189, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.668658] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2064.670244] env[62684]: DEBUG nova.compute.manager [req-d969a623-80db-45b7-ba93-eabd7342a798 req-8a33ff77-d014-4790-9093-55aaf7a61c35 service nova] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Received event network-changed-b5e1d42c-c9c1-4c43-83b7-81eac7065383 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2064.670293] env[62684]: DEBUG nova.compute.manager [req-d969a623-80db-45b7-ba93-eabd7342a798 req-8a33ff77-d014-4790-9093-55aaf7a61c35 service nova] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Refreshing instance network info cache due to event network-changed-b5e1d42c-c9c1-4c43-83b7-81eac7065383. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2064.671026] env[62684]: DEBUG oslo_concurrency.lockutils [req-d969a623-80db-45b7-ba93-eabd7342a798 req-8a33ff77-d014-4790-9093-55aaf7a61c35 service nova] Acquiring lock "refresh_cache-983218ac-7cf3-48ef-88d8-aa9e9322df4b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2064.671026] env[62684]: DEBUG oslo_concurrency.lockutils [req-d969a623-80db-45b7-ba93-eabd7342a798 req-8a33ff77-d014-4790-9093-55aaf7a61c35 service nova] Acquired lock "refresh_cache-983218ac-7cf3-48ef-88d8-aa9e9322df4b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2064.671026] env[62684]: DEBUG nova.network.neutron [req-d969a623-80db-45b7-ba93-eabd7342a798 req-8a33ff77-d014-4790-9093-55aaf7a61c35 service nova] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Refreshing network info cache for port b5e1d42c-c9c1-4c43-83b7-81eac7065383 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2064.700492] env[62684]: DEBUG oslo_vmware.api [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053184, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.728033] env[62684]: DEBUG nova.compute.manager [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2064.753540] env[62684]: DEBUG nova.virt.hardware [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2064.753829] env[62684]: DEBUG nova.virt.hardware [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2064.753948] env[62684]: DEBUG nova.virt.hardware [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2064.754153] env[62684]: DEBUG nova.virt.hardware [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2064.754307] env[62684]: DEBUG nova.virt.hardware [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2064.754461] env[62684]: DEBUG nova.virt.hardware [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2064.754712] env[62684]: DEBUG nova.virt.hardware [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2064.754832] env[62684]: DEBUG nova.virt.hardware [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2064.755013] env[62684]: DEBUG 
nova.virt.hardware [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2064.755244] env[62684]: DEBUG nova.virt.hardware [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2064.755435] env[62684]: DEBUG nova.virt.hardware [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2064.756292] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-097da30d-b1c2-4896-8cf0-47aa9048f189 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.764401] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2d37848-90b2-4792-a6b7-b4a47bc29c1b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.788474] env[62684]: DEBUG oslo_vmware.api [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053185, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.912313} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2064.791396] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2064.791679] env[62684]: DEBUG oslo_vmware.api [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053186, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.792386] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ad6e19-99a6-4e86-9903-cc62c53753da {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.813376] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] 42ae6edd-e1f5-4ef8-a248-8f02e94d798e/42ae6edd-e1f5-4ef8-a248-8f02e94d798e.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2064.813615] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1825b67-6a85-4ccf-9379-3ac61fb5025b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.832844] env[62684]: DEBUG oslo_vmware.api [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2064.832844] env[62684]: value = "task-2053190" [ 2064.832844] env[62684]: _type = "Task" [ 2064.832844] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2064.840143] env[62684]: DEBUG oslo_vmware.api [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053190, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.931779] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2065.055758] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053189, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.081544] env[62684]: DEBUG nova.scheduler.client.report [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 108 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2065.081855] env[62684]: DEBUG nova.compute.provider_tree [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 108 to 109 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2065.082066] env[62684]: DEBUG nova.compute.provider_tree [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2065.202072] env[62684]: DEBUG oslo_vmware.api [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053184, 'name': ReconfigVM_Task, 'duration_secs': 1.344042} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.202072] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Reconfigured VM instance instance-00000049 to attach disk [datastore1] 845b2e2a-cee0-4598-afbd-1f07aa52468f/845b2e2a-cee0-4598-afbd-1f07aa52468f.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2065.205623] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e9a45488-a40d-4561-a929-80781500b1b0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.209552] env[62684]: DEBUG oslo_vmware.api [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2065.209552] env[62684]: value = "task-2053191" [ 2065.209552] env[62684]: _type = "Task" [ 2065.209552] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.229286] env[62684]: DEBUG oslo_vmware.api [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053191, 'name': Rename_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.262847] env[62684]: DEBUG nova.compute.manager [req-b321834d-c3f4-4b66-a916-1d6e827c170d req-5cde16c4-2ed6-483c-9b2d-0bc31e8ef569 service nova] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Received event network-vif-plugged-868206c0-a9c5-4e53-a798-830df2d0c619 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2065.262976] env[62684]: DEBUG oslo_concurrency.lockutils [req-b321834d-c3f4-4b66-a916-1d6e827c170d req-5cde16c4-2ed6-483c-9b2d-0bc31e8ef569 service nova] Acquiring lock "2aac4230-2070-48be-b91a-5cb4218a0574-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2065.263471] env[62684]: DEBUG oslo_concurrency.lockutils [req-b321834d-c3f4-4b66-a916-1d6e827c170d req-5cde16c4-2ed6-483c-9b2d-0bc31e8ef569 service nova] Lock "2aac4230-2070-48be-b91a-5cb4218a0574-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2065.263555] env[62684]: DEBUG oslo_concurrency.lockutils [req-b321834d-c3f4-4b66-a916-1d6e827c170d req-5cde16c4-2ed6-483c-9b2d-0bc31e8ef569 service nova] Lock "2aac4230-2070-48be-b91a-5cb4218a0574-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2065.263807] env[62684]: DEBUG nova.compute.manager [req-b321834d-c3f4-4b66-a916-1d6e827c170d req-5cde16c4-2ed6-483c-9b2d-0bc31e8ef569 service nova] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] No waiting 
events found dispatching network-vif-plugged-868206c0-a9c5-4e53-a798-830df2d0c619 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2065.263990] env[62684]: WARNING nova.compute.manager [req-b321834d-c3f4-4b66-a916-1d6e827c170d req-5cde16c4-2ed6-483c-9b2d-0bc31e8ef569 service nova] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Received unexpected event network-vif-plugged-868206c0-a9c5-4e53-a798-830df2d0c619 for instance with vm_state building and task_state spawning. [ 2065.290575] env[62684]: DEBUG oslo_vmware.api [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053186, 'name': ReconfigVM_Task, 'duration_secs': 1.134249} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.292478] env[62684]: DEBUG nova.network.neutron [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Successfully updated port: 868206c0-a9c5-4e53-a798-830df2d0c619 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2065.293222] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Reconfigured VM instance instance-0000004b to attach disk [datastore2] 2f8f7e02-54fb-4275-badb-35c0b840ab33/2f8f7e02-54fb-4275-badb-35c0b840ab33.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2065.296557] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d01726bf-7785-4b0d-bc6b-005c1f32694a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.305025] env[62684]: DEBUG oslo_vmware.api [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2065.305025] env[62684]: value = "task-2053192" [ 2065.305025] env[62684]: _type = "Task" [ 2065.305025] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.315271] env[62684]: DEBUG oslo_vmware.api [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053192, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.344360] env[62684]: DEBUG oslo_vmware.api [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053190, 'name': ReconfigVM_Task, 'duration_secs': 0.260717} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.344743] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Reconfigured VM instance instance-00000048 to attach disk [datastore1] 42ae6edd-e1f5-4ef8-a248-8f02e94d798e/42ae6edd-e1f5-4ef8-a248-8f02e94d798e.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2065.345545] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0a5507e4-ec5f-4d98-8ac0-0dc295f1749c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.351984] env[62684]: DEBUG oslo_vmware.api [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2065.351984] env[62684]: value = "task-2053193" [ 2065.351984] env[62684]: _type = "Task" [ 2065.351984] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.361170] env[62684]: DEBUG oslo_vmware.api [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053193, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.419237] env[62684]: DEBUG nova.network.neutron [req-d969a623-80db-45b7-ba93-eabd7342a798 req-8a33ff77-d014-4790-9093-55aaf7a61c35 service nova] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Updated VIF entry in instance network info cache for port b5e1d42c-c9c1-4c43-83b7-81eac7065383. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2065.419722] env[62684]: DEBUG nova.network.neutron [req-d969a623-80db-45b7-ba93-eabd7342a798 req-8a33ff77-d014-4790-9093-55aaf7a61c35 service nova] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Updating instance_info_cache with network_info: [{"id": "b5e1d42c-c9c1-4c43-83b7-81eac7065383", "address": "fa:16:3e:6c:54:30", "network": {"id": "7678b347-6a54-4b84-9a4d-b566bbeb1ea4", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-51664912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d88ac878d44480b3b54b24ab87efa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5e1d42c-c9", "ovs_interfaceid": "b5e1d42c-c9c1-4c43-83b7-81eac7065383", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2065.555855] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053189, 'name': CreateVM_Task, 'duration_secs': 0.685945} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.556205] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2065.556723] env[62684]: DEBUG oslo_concurrency.lockutils [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2065.556901] env[62684]: DEBUG oslo_concurrency.lockutils [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2065.557276] env[62684]: DEBUG oslo_concurrency.lockutils [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2065.557533] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15fa6075-ac1a-4430-98bc-b266efbf1292 {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.562271] env[62684]: DEBUG oslo_vmware.api [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2065.562271] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c9cf74-7e36-e011-636f-58ec9f036169" [ 2065.562271] env[62684]: _type = "Task" [ 2065.562271] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.569656] env[62684]: DEBUG oslo_vmware.api [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c9cf74-7e36-e011-636f-58ec9f036169, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.587464] env[62684]: DEBUG oslo_concurrency.lockutils [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.893s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2065.589591] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.359s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2065.589886] env[62684]: DEBUG nova.objects.instance [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Lazy-loading 'resources' on Instance uuid 31419285-9fdf-4d37-94d7-d1b08c6b6b05 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2065.608288] env[62684]: INFO nova.scheduler.client.report [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Deleted allocations for instance feca8680-4baa-4b2c-9875-69a88b351dc0 [ 2065.720425] env[62684]: DEBUG oslo_vmware.api [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053191, 'name': Rename_Task, 'duration_secs': 0.147433} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.720736] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2065.721039] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e874ae77-0fb8-424a-b2fa-8567715fb041 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.728404] env[62684]: DEBUG oslo_vmware.api [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2065.728404] env[62684]: value = "task-2053194" [ 2065.728404] env[62684]: _type = "Task" [ 2065.728404] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.736587] env[62684]: DEBUG oslo_vmware.api [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053194, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.797966] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Acquiring lock "refresh_cache-2aac4230-2070-48be-b91a-5cb4218a0574" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2065.797966] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Acquired lock "refresh_cache-2aac4230-2070-48be-b91a-5cb4218a0574" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2065.797966] env[62684]: DEBUG nova.network.neutron [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2065.816781] env[62684]: DEBUG oslo_vmware.api [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053192, 'name': Rename_Task, 'duration_secs': 0.166318} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.817101] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2065.817378] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e4e6a6d7-0379-4c9f-a207-6d7a6b603ecb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.824197] env[62684]: DEBUG oslo_vmware.api [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2065.824197] env[62684]: value = "task-2053195" [ 2065.824197] env[62684]: _type = "Task" [ 2065.824197] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.833028] env[62684]: DEBUG oslo_vmware.api [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053195, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.862128] env[62684]: DEBUG oslo_vmware.api [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053193, 'name': Rename_Task, 'duration_secs': 0.149505} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.862438] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2065.862692] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-190250b0-e184-4896-a320-b1916f668866 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.869869] env[62684]: DEBUG oslo_vmware.api [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2065.869869] env[62684]: value = "task-2053196" [ 2065.869869] env[62684]: _type = "Task" [ 2065.869869] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.878556] env[62684]: DEBUG oslo_vmware.api [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053196, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.922736] env[62684]: DEBUG oslo_concurrency.lockutils [req-d969a623-80db-45b7-ba93-eabd7342a798 req-8a33ff77-d014-4790-9093-55aaf7a61c35 service nova] Releasing lock "refresh_cache-983218ac-7cf3-48ef-88d8-aa9e9322df4b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2065.923049] env[62684]: DEBUG nova.compute.manager [req-d969a623-80db-45b7-ba93-eabd7342a798 req-8a33ff77-d014-4790-9093-55aaf7a61c35 service nova] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Received event network-vif-deleted-77fad839-d56b-4f69-ae2c-c846fad13348 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2066.074151] env[62684]: DEBUG oslo_vmware.api [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c9cf74-7e36-e011-636f-58ec9f036169, 'name': SearchDatastore_Task, 'duration_secs': 0.008891} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2066.074519] env[62684]: DEBUG oslo_concurrency.lockutils [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2066.074780] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2066.075096] env[62684]: DEBUG oslo_concurrency.lockutils [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2066.075295] env[62684]: DEBUG oslo_concurrency.lockutils [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2066.075474] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2066.075768] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bad8df2e-8601-4d1b-ab54-88bd93a670fe {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.084861] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2066.085115] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2066.085977] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d158e7d2-e0f7-4b9b-8867-80fd35087ff8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.094125] env[62684]: DEBUG oslo_vmware.api [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2066.094125] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a245d6-26bd-f4a4-73f0-98378f587835" [ 2066.094125] env[62684]: _type = "Task" [ 2066.094125] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2066.102779] env[62684]: DEBUG oslo_vmware.api [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a245d6-26bd-f4a4-73f0-98378f587835, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.115404] env[62684]: DEBUG oslo_concurrency.lockutils [None req-972ed16e-9b9e-49e3-8e75-abd103016e1d tempest-ServersWithSpecificFlavorTestJSON-1258695446 tempest-ServersWithSpecificFlavorTestJSON-1258695446-project-member] Lock "feca8680-4baa-4b2c-9875-69a88b351dc0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.624s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2066.241904] env[62684]: DEBUG oslo_vmware.api [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053194, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.339904] env[62684]: DEBUG oslo_vmware.api [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053195, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.344644] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e59fc0f8-e5fd-4808-8929-1bdce3c25fcf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.350419] env[62684]: DEBUG nova.network.neutron [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2066.353252] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-386a65aa-15c7-4ecd-82e6-df3f47a39c4a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.390804] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6bf3d2e-0021-49d0-b696-e03c3e5208cb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.399177] env[62684]: DEBUG oslo_vmware.api [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053196, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.402312] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a63a814c-33e1-44b6-bf77-4a548370ccdf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.422738] env[62684]: DEBUG nova.compute.provider_tree [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2066.531338] env[62684]: DEBUG nova.network.neutron [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Updating instance_info_cache with network_info: [{"id": "868206c0-a9c5-4e53-a798-830df2d0c619", "address": "fa:16:3e:b1:8b:1a", "network": {"id": "f0abeae7-2fa1-4b1e-80dd-ff6898d96292", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-2015925501-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"200eebbae628463b9decddf3546b18df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap868206c0-a9", "ovs_interfaceid": "868206c0-a9c5-4e53-a798-830df2d0c619", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2066.605754] env[62684]: DEBUG oslo_vmware.api [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a245d6-26bd-f4a4-73f0-98378f587835, 'name': SearchDatastore_Task, 'duration_secs': 0.010017} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2066.606533] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0e60bd2-e759-4e1b-accc-9458f318000d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.612024] env[62684]: DEBUG oslo_vmware.api [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2066.612024] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524c237b-3133-4649-13c0-b80c893c37c8" [ 2066.612024] env[62684]: _type = "Task" [ 2066.612024] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2066.619352] env[62684]: DEBUG oslo_vmware.api [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524c237b-3133-4649-13c0-b80c893c37c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.741168] env[62684]: DEBUG oslo_vmware.api [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053194, 'name': PowerOnVM_Task, 'duration_secs': 0.619287} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2066.741447] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2066.741654] env[62684]: INFO nova.compute.manager [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Took 11.65 seconds to spawn the instance on the hypervisor. 
[ 2066.741843] env[62684]: DEBUG nova.compute.manager [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2066.742873] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-273f304b-4773-43d7-a758-b9feabe2213f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.838252] env[62684]: DEBUG oslo_vmware.api [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053195, 'name': PowerOnVM_Task, 'duration_secs': 0.607487} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2066.838560] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2066.838773] env[62684]: INFO nova.compute.manager [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Took 6.92 seconds to spawn the instance on the hypervisor. [ 2066.838959] env[62684]: DEBUG nova.compute.manager [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2066.839818] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0242bae-4a5a-4b7c-8036-5b0d7a41a0d2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.894778] env[62684]: DEBUG oslo_vmware.api [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053196, 'name': PowerOnVM_Task, 'duration_secs': 0.588026} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2066.895644] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2066.895975] env[62684]: INFO nova.compute.manager [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Took 14.43 seconds to spawn the instance on the hypervisor. 
[ 2066.896278] env[62684]: DEBUG nova.compute.manager [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2066.897157] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67ac9f7b-1318-423a-a137-3ac8a8138678 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.969126] env[62684]: DEBUG nova.scheduler.client.report [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 109 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2066.969449] env[62684]: DEBUG nova.compute.provider_tree [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 109 to 110 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2066.969649] env[62684]: DEBUG nova.compute.provider_tree [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2067.033579] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Releasing lock "refresh_cache-2aac4230-2070-48be-b91a-5cb4218a0574" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2067.033917] env[62684]: DEBUG nova.compute.manager [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Instance network_info: |[{"id": "868206c0-a9c5-4e53-a798-830df2d0c619", "address": "fa:16:3e:b1:8b:1a", "network": {"id": "f0abeae7-2fa1-4b1e-80dd-ff6898d96292", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-2015925501-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": 
"gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "200eebbae628463b9decddf3546b18df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap868206c0-a9", "ovs_interfaceid": "868206c0-a9c5-4e53-a798-830df2d0c619", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2067.034392] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:8b:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98e21102-8954-4f6f-b1e6-5d764a53aa22', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '868206c0-a9c5-4e53-a798-830df2d0c619', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2067.041936] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Creating folder: Project (200eebbae628463b9decddf3546b18df). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2067.042240] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6801db18-38f6-4abd-b27d-7381ec6bcacb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.054641] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Created folder: Project (200eebbae628463b9decddf3546b18df) in parent group-v421118. [ 2067.054845] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Creating folder: Instances. Parent ref: group-v421329. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2067.055105] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-792d19b1-6c65-4851-98e4-a4b246a8f8a1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.064151] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Created folder: Instances in parent group-v421329. 
[ 2067.064435] env[62684]: DEBUG oslo.service.loopingcall [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2067.064650] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2067.064959] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5584b487-4d92-4816-ad39-d693242c8768 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.087317] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2067.087317] env[62684]: value = "task-2053199" [ 2067.087317] env[62684]: _type = "Task" [ 2067.087317] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2067.095167] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053199, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2067.121899] env[62684]: DEBUG oslo_vmware.api [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524c237b-3133-4649-13c0-b80c893c37c8, 'name': SearchDatastore_Task, 'duration_secs': 0.009181} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2067.122190] env[62684]: DEBUG oslo_concurrency.lockutils [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2067.122447] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 983218ac-7cf3-48ef-88d8-aa9e9322df4b/983218ac-7cf3-48ef-88d8-aa9e9322df4b.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2067.122706] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a1b42abe-a8e9-4612-91e3-483461457715 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.129256] env[62684]: DEBUG oslo_vmware.api [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2067.129256] env[62684]: value = "task-2053200" [ 2067.129256] env[62684]: _type = "Task" [ 2067.129256] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2067.137504] env[62684]: DEBUG oslo_vmware.api [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053200, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2067.262095] env[62684]: INFO nova.compute.manager [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Took 16.20 seconds to build instance. [ 2067.307454] env[62684]: DEBUG nova.compute.manager [req-b58e06b5-fd06-4adf-a76c-5c1bfa83d039 req-ec5d9ddf-5ebe-4e8c-aef0-9e56f292033a service nova] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Received event network-changed-868206c0-a9c5-4e53-a798-830df2d0c619 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2067.307690] env[62684]: DEBUG nova.compute.manager [req-b58e06b5-fd06-4adf-a76c-5c1bfa83d039 req-ec5d9ddf-5ebe-4e8c-aef0-9e56f292033a service nova] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Refreshing instance network info cache due to event network-changed-868206c0-a9c5-4e53-a798-830df2d0c619. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2067.307937] env[62684]: DEBUG oslo_concurrency.lockutils [req-b58e06b5-fd06-4adf-a76c-5c1bfa83d039 req-ec5d9ddf-5ebe-4e8c-aef0-9e56f292033a service nova] Acquiring lock "refresh_cache-2aac4230-2070-48be-b91a-5cb4218a0574" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2067.308125] env[62684]: DEBUG oslo_concurrency.lockutils [req-b58e06b5-fd06-4adf-a76c-5c1bfa83d039 req-ec5d9ddf-5ebe-4e8c-aef0-9e56f292033a service nova] Acquired lock "refresh_cache-2aac4230-2070-48be-b91a-5cb4218a0574" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2067.308294] env[62684]: DEBUG nova.network.neutron [req-b58e06b5-fd06-4adf-a76c-5c1bfa83d039 req-ec5d9ddf-5ebe-4e8c-aef0-9e56f292033a service nova] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Refreshing network info cache for port 868206c0-a9c5-4e53-a798-830df2d0c619 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2067.359083] env[62684]: INFO nova.compute.manager [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Took 14.70 seconds to build instance. [ 2067.422389] env[62684]: INFO nova.compute.manager [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Took 20.08 seconds to build instance. 
[ 2067.477388] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.887s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2067.481895] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.813s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2067.483181] env[62684]: DEBUG nova.objects.instance [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lazy-loading 'resources' on Instance uuid b4cd871a-30ea-4b7a-98ad-00b8676dc2cd {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2067.513275] env[62684]: INFO nova.scheduler.client.report [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Deleted allocations for instance 31419285-9fdf-4d37-94d7-d1b08c6b6b05 [ 2067.601461] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053199, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2067.640311] env[62684]: DEBUG oslo_vmware.api [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053200, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2067.765226] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fcd92ce-acfb-424d-89c2-44ed53ea3b4e tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "845b2e2a-cee0-4598-afbd-1f07aa52468f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.715s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2067.862265] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3fea0f30-205a-4435-91bf-c78e995a1315 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Lock "2f8f7e02-54fb-4275-badb-35c0b840ab33" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.213s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2067.925251] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c9434fce-a817-4d81-ac03-70049053e452 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "42ae6edd-e1f5-4ef8-a248-8f02e94d798e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.587s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2068.022341] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fad6caa6-e54b-4af6-aecb-d0bbe906712a tempest-ServersTestFqdnHostnames-1398608514 tempest-ServersTestFqdnHostnames-1398608514-project-member] Lock "31419285-9fdf-4d37-94d7-d1b08c6b6b05" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.460s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2068.048400] env[62684]: DEBUG nova.network.neutron [req-b58e06b5-fd06-4adf-a76c-5c1bfa83d039 req-ec5d9ddf-5ebe-4e8c-aef0-9e56f292033a service nova] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Updated VIF entry in instance network info cache for port 868206c0-a9c5-4e53-a798-830df2d0c619. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2068.048769] env[62684]: DEBUG nova.network.neutron [req-b58e06b5-fd06-4adf-a76c-5c1bfa83d039 req-ec5d9ddf-5ebe-4e8c-aef0-9e56f292033a service nova] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Updating instance_info_cache with network_info: [{"id": "868206c0-a9c5-4e53-a798-830df2d0c619", "address": "fa:16:3e:b1:8b:1a", "network": {"id": "f0abeae7-2fa1-4b1e-80dd-ff6898d96292", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-2015925501-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "200eebbae628463b9decddf3546b18df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap868206c0-a9", "ovs_interfaceid": "868206c0-a9c5-4e53-a798-830df2d0c619", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2068.102123] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053199, 'name': CreateVM_Task, 'duration_secs': 0.794374} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2068.102617] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2068.103587] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2068.103853] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2068.104194] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2068.104514] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e662d731-e752-4783-8565-65e6cbc840df {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.110050] env[62684]: DEBUG oslo_vmware.api [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Waiting for the task: (returnval){ [ 2068.110050] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5256b74d-253a-9451-6ae2-42211a22e7b7" [ 2068.110050] env[62684]: _type = "Task" [ 2068.110050] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2068.122385] env[62684]: DEBUG oslo_vmware.api [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5256b74d-253a-9451-6ae2-42211a22e7b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2068.142134] env[62684]: DEBUG oslo_vmware.api [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053200, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.510747} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2068.142382] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 983218ac-7cf3-48ef-88d8-aa9e9322df4b/983218ac-7cf3-48ef-88d8-aa9e9322df4b.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2068.142602] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2068.143059] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-75d56654-28ce-42e4-be84-a53e34020e53 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.151625] env[62684]: DEBUG oslo_vmware.api [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2068.151625] env[62684]: value = "task-2053201" [ 2068.151625] env[62684]: _type = "Task" [ 2068.151625] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2068.166316] env[62684]: DEBUG oslo_vmware.api [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053201, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2068.280035] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c6e26cc-38ba-452f-bdfc-22a37e636886 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.290058] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ef8069c-6aeb-4cfe-9c80-edbcb109cc95 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.325274] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-102caab1-47b1-4053-b53e-a172e9e05dbe {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.333300] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c4ed197-4a7b-44e2-837c-dd36ef278fdb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.347999] env[62684]: DEBUG nova.compute.provider_tree [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2068.552739] env[62684]: DEBUG oslo_concurrency.lockutils [req-b58e06b5-fd06-4adf-a76c-5c1bfa83d039 req-ec5d9ddf-5ebe-4e8c-aef0-9e56f292033a service nova] Releasing lock "refresh_cache-2aac4230-2070-48be-b91a-5cb4218a0574" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2068.625750] env[62684]: DEBUG oslo_vmware.api [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5256b74d-253a-9451-6ae2-42211a22e7b7, 'name': SearchDatastore_Task, 'duration_secs': 0.014281} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2068.626449] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2068.626449] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2068.626449] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2068.626773] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2068.626773] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2068.627110] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d78be0bd-9f88-4b1a-a8ca-556b53daa4ce {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.641647] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2068.641860] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2068.642626] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b510a325-8171-4a8f-9592-cf7c68cef708 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.648811] env[62684]: DEBUG oslo_vmware.api [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Waiting for the task: (returnval){ [ 2068.648811] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522784ac-424a-90a6-ebcf-a3feba931374" [ 2068.648811] env[62684]: _type = "Task" [ 2068.648811] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2068.664027] env[62684]: DEBUG oslo_vmware.api [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053201, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.36792} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2068.667766] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2068.667766] env[62684]: DEBUG oslo_vmware.api [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522784ac-424a-90a6-ebcf-a3feba931374, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2068.667766] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f3af4e-3b11-4d37-9e9a-6e4bf01e5fe9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.699152] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 983218ac-7cf3-48ef-88d8-aa9e9322df4b/983218ac-7cf3-48ef-88d8-aa9e9322df4b.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2068.700055] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-89e1d112-eeaf-41a0-a624-97609ef7aabc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.728505] env[62684]: INFO nova.compute.manager [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Rebuilding instance [ 2068.738665] env[62684]: DEBUG oslo_vmware.api [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2068.738665] env[62684]: value = "task-2053202" [ 2068.738665] env[62684]: _type = "Task" [ 2068.738665] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2068.753258] env[62684]: DEBUG oslo_vmware.api [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053202, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2068.769244] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "23578214-6708-43ae-88ce-56212083532a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2068.769767] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "23578214-6708-43ae-88ce-56212083532a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2068.796956] env[62684]: DEBUG nova.compute.manager [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2068.798562] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ab9937-55cb-495a-be2e-ac286cbda6c4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.874413] env[62684]: ERROR nova.scheduler.client.report [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [req-7253bb7f-150c-4197-a2da-515acb38aeaf] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f.
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7253bb7f-150c-4197-a2da-515acb38aeaf"}]} [ 2068.894768] env[62684]: DEBUG nova.scheduler.client.report [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2068.916030] env[62684]: DEBUG nova.scheduler.client.report [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2068.916030] env[62684]: DEBUG nova.compute.provider_tree [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2068.936192] env[62684]: DEBUG nova.scheduler.client.report [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2068.956958] env[62684]: DEBUG nova.scheduler.client.report [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2069.164309] env[62684]: DEBUG oslo_vmware.api [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522784ac-424a-90a6-ebcf-a3feba931374, 'name': SearchDatastore_Task, 'duration_secs': 0.033682} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2069.165289] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ace8f57d-7af8-4599-9f14-e9426982a51a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.175177] env[62684]: DEBUG oslo_vmware.api [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Waiting for the task: (returnval){ [ 2069.175177] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5260cea9-cba6-8858-a17a-d95bc35a65d0" [ 2069.175177] env[62684]: _type = "Task" [ 2069.175177] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2069.184846] env[62684]: DEBUG oslo_vmware.api [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5260cea9-cba6-8858-a17a-d95bc35a65d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2069.249690] env[62684]: DEBUG oslo_vmware.api [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053202, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2069.273251] env[62684]: DEBUG nova.compute.manager [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2069.302691] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d6536ad-f4bb-4122-91a9-bcc0269bccf0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.311591] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b8706d1-2f38-4f75-b876-8b3f881941da {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.315808] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2069.316151] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c27b3c22-1677-4d1b-bc6b-0421bb64f9d3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.350469] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b727aab-9d32-462f-bb7a-d70c90be4ef6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.358085] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2069.358085] env[62684]: value = "task-2053203" [ 2069.358085] env[62684]: _type = "Task" [ 2069.358085] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2069.368203] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-567f9271-1479-454b-85c2-5e600884a863 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.376427] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053203, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2069.388330] env[62684]: DEBUG nova.compute.provider_tree [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2069.690094] env[62684]: DEBUG oslo_vmware.api [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5260cea9-cba6-8858-a17a-d95bc35a65d0, 'name': SearchDatastore_Task, 'duration_secs': 0.010146} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2069.690397] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2069.690659] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 2aac4230-2070-48be-b91a-5cb4218a0574/2aac4230-2070-48be-b91a-5cb4218a0574.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2069.690942] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b820ae2a-a43e-4220-811c-4867e4937064 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.698922] env[62684]: DEBUG oslo_vmware.api [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Waiting for the task: (returnval){ [ 2069.698922] env[62684]: value = "task-2053204" [ 2069.698922] env[62684]: _type = "Task" [ 2069.698922] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2069.710601] env[62684]: DEBUG oslo_vmware.api [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Task: {'id': task-2053204, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2069.755889] env[62684]: DEBUG oslo_vmware.api [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053202, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2069.799941] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2069.872833] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053203, 'name': PowerOffVM_Task, 'duration_secs': 0.304713} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2069.873497] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2069.873497] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2069.874210] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab4b9fbb-7431-4637-8531-86b3573037de {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.881779] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2069.882045] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-81f2988d-882d-451e-a7bb-31f907d5cb51 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.907843] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2069.908137] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2069.908248] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Deleting the datastore file [datastore2] 2f8f7e02-54fb-4275-badb-35c0b840ab33 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2069.908523] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-779abbd9-72f1-430d-9da9-9e3c80c8ec3e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.916923] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2069.916923] env[62684]: value = "task-2053206" [ 2069.916923] env[62684]: _type = "Task" [ 2069.916923] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2069.926843] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053206, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2069.930975] env[62684]: DEBUG nova.scheduler.client.report [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 111 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2069.930975] env[62684]: DEBUG nova.compute.provider_tree [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 111 to 112 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2069.930975] env[62684]: DEBUG nova.compute.provider_tree [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2070.212274] env[62684]: DEBUG oslo_vmware.api [None 
req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Task: {'id': task-2053204, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2070.253030] env[62684]: DEBUG oslo_vmware.api [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053202, 'name': ReconfigVM_Task, 'duration_secs': 1.406416} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2070.253030] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 983218ac-7cf3-48ef-88d8-aa9e9322df4b/983218ac-7cf3-48ef-88d8-aa9e9322df4b.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2070.253289] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e07b534b-14ec-4752-a3a1-81ad2c4e69ac {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.259996] env[62684]: DEBUG oslo_vmware.api [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2070.259996] env[62684]: value = "task-2053207" [ 2070.259996] env[62684]: _type = "Task" [ 2070.259996] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2070.269778] env[62684]: DEBUG oslo_vmware.api [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053207, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2070.368397] env[62684]: DEBUG nova.compute.manager [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2070.369748] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7c9222c-ef6c-4c9e-84c8-dc0efe01a550 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.429028] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053206, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.276849} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2070.429296] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2070.429485] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2070.429711] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2070.436750] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.955s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2070.439435] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.508s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2070.439584] env[62684]: DEBUG nova.objects.instance [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lazy-loading 'resources' on Instance uuid 41da0c18-dd9c-49bb-8b0d-a907575ee22e {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2070.458526] env[62684]: INFO nova.scheduler.client.report [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Deleted allocations for instance b4cd871a-30ea-4b7a-98ad-00b8676dc2cd [ 2070.712526] env[62684]: DEBUG oslo_vmware.api [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Task: {'id': task-2053204, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.554424} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2070.713042] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 2aac4230-2070-48be-b91a-5cb4218a0574/2aac4230-2070-48be-b91a-5cb4218a0574.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2070.713281] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2070.713556] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fcad765e-5c07-461e-bddd-2e18a072a883 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.720639] env[62684]: DEBUG oslo_vmware.api [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Waiting for the task: (returnval){ [ 2070.720639] env[62684]: value = "task-2053208" [ 2070.720639] env[62684]: _type = "Task" [ 2070.720639] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2070.728595] env[62684]: DEBUG oslo_vmware.api [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Task: {'id': task-2053208, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2070.772088] env[62684]: DEBUG oslo_vmware.api [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053207, 'name': Rename_Task, 'duration_secs': 0.322917} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2070.772535] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2070.772664] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ddb83da9-e74b-4f09-b3e2-e21ac4110f07 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.779687] env[62684]: DEBUG oslo_vmware.api [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2070.779687] env[62684]: value = "task-2053209" [ 2070.779687] env[62684]: _type = "Task" [ 2070.779687] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2070.787818] env[62684]: DEBUG oslo_vmware.api [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053209, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2070.882489] env[62684]: INFO nova.compute.manager [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] instance snapshotting [ 2070.887990] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-311345c0-bd6c-4194-8e98-07eababefc2f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.909997] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-162822d8-4e95-4aec-85c8-794fe1ce93a1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.969263] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9053e44e-df1a-4923-bf73-449d117a60bc tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "b4cd871a-30ea-4b7a-98ad-00b8676dc2cd" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 9.639s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2071.126944] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Acquiring lock "26303c0e-be87-41ff-a15c-e92f91f8a05f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2071.127517] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Lock "26303c0e-be87-41ff-a15c-e92f91f8a05f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2071.127803] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Acquiring lock "26303c0e-be87-41ff-a15c-e92f91f8a05f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2071.127989] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Lock "26303c0e-be87-41ff-a15c-e92f91f8a05f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: 
waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2071.128219] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Lock "26303c0e-be87-41ff-a15c-e92f91f8a05f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2071.132281] env[62684]: INFO nova.compute.manager [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Terminating instance [ 2071.135934] env[62684]: DEBUG nova.compute.manager [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2071.135934] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2071.141282] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd5df38-cf37-4121-90b6-4cdce17d8d83 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.151941] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2071.151941] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eec397c1-c212-44c9-8040-1e31164ca517 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.159470] env[62684]: DEBUG oslo_vmware.api [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Waiting for the task: (returnval){ [ 2071.159470] env[62684]: value = "task-2053210" [ 2071.159470] env[62684]: _type = "Task" [ 2071.159470] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2071.172909] env[62684]: DEBUG oslo_vmware.api [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Task: {'id': task-2053210, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.234524] env[62684]: DEBUG oslo_vmware.api [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Task: {'id': task-2053208, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071809} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2071.234833] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2071.235728] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39bd6262-2b08-4743-9ab8-5c6d6844affd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.260222] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 2aac4230-2070-48be-b91a-5cb4218a0574/2aac4230-2070-48be-b91a-5cb4218a0574.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2071.262996] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c763f13e-cf70-4efb-8934-fa0bc4c8c713 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.285712] env[62684]: DEBUG oslo_vmware.api [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Waiting for the task: (returnval){ [ 2071.285712] env[62684]: value = "task-2053211" [ 2071.285712] env[62684]: _type = "Task" [ 2071.285712] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2071.302029] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9338e844-1fc5-4120-aafc-b15c8a7419ad {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.302029] env[62684]: DEBUG oslo_vmware.api [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053209, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.308259] env[62684]: DEBUG oslo_vmware.api [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Task: {'id': task-2053211, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.311704] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25358dca-642a-4eec-bfa5-f3f54bd184e4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.347816] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5120b56f-4dce-43f7-bf1c-69f061a2cc70 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.356076] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e30a6745-2c69-4e4d-924e-c45b77944a53 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.372596] env[62684]: DEBUG nova.compute.provider_tree [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2071.423735] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Creating Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2071.425373] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d190ebd0-ffac-418c-9588-72d98e728ada {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.435893] env[62684]: DEBUG oslo_vmware.api [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2071.435893] env[62684]: value = "task-2053212" [ 2071.435893] env[62684]: _type = "Task" [ 2071.435893] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2071.452324] env[62684]: DEBUG oslo_vmware.api [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053212, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.476743] env[62684]: DEBUG nova.virt.hardware [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=<?>,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-01-10T07:25:27Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2071.477246] env[62684]: DEBUG nova.virt.hardware [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2071.477458] env[62684]: DEBUG nova.virt.hardware [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2071.477733] env[62684]: DEBUG nova.virt.hardware [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2071.477957] env[62684]: DEBUG nova.virt.hardware [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2071.478187] env[62684]: DEBUG nova.virt.hardware [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2071.478474] env[62684]: DEBUG nova.virt.hardware [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2071.478709] env[62684]: DEBUG nova.virt.hardware [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2071.478935] env[62684]: DEBUG nova.virt.hardware [None 
req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2071.479175] env[62684]: DEBUG nova.virt.hardware [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2071.479459] env[62684]: DEBUG nova.virt.hardware [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2071.480865] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-955f8a97-f69c-4857-ac93-87e0536a1dc7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.489194] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585f259d-3aaa-4adf-81a0-563feb6777fb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.503894] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Instance VIF info [] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2071.510153] env[62684]: DEBUG oslo.service.loopingcall [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2071.510552] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2071.510929] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-43bb4bf1-5b40-44aa-b616-340ebe45a6eb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.531149] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2071.531149] env[62684]: value = "task-2053213" [ 2071.531149] env[62684]: _type = "Task" [ 2071.531149] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2071.539376] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053213, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.676639] env[62684]: DEBUG oslo_vmware.api [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Task: {'id': task-2053210, 'name': PowerOffVM_Task, 'duration_secs': 0.206447} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2071.676925] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2071.677152] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2071.677420] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5cf2b991-1c88-4a8c-9595-21cdde32324e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.797200] env[62684]: DEBUG oslo_vmware.api [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Task: {'id': task-2053211, 'name': ReconfigVM_Task, 'duration_secs': 0.30532} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2071.800828] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 2aac4230-2070-48be-b91a-5cb4218a0574/2aac4230-2070-48be-b91a-5cb4218a0574.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2071.801814] env[62684]: DEBUG oslo_vmware.api [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053209, 'name': PowerOnVM_Task, 'duration_secs': 0.846927} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2071.802150] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d23d3a06-d489-4395-9819-05fe701f7fe1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.805020] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2071.805020] env[62684]: INFO nova.compute.manager [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Took 9.51 seconds to spawn the instance on the hypervisor. 
[ 2071.805020] env[62684]: DEBUG nova.compute.manager [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2071.805789] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a231d21-8eab-45ed-8559-273821e26fbf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.823356] env[62684]: DEBUG oslo_vmware.api [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Waiting for the task: (returnval){ [ 2071.823356] env[62684]: value = "task-2053215" [ 2071.823356] env[62684]: _type = "Task" [ 2071.823356] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2071.836586] env[62684]: DEBUG oslo_vmware.api [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Task: {'id': task-2053215, 'name': Rename_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.879966] env[62684]: DEBUG nova.scheduler.client.report [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2071.931096] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4d1d5241-340b-4c80-a9ef-e05a2ceecaf7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "interface-81b7949d-be24-46c9-8dc8-c249b65bb039-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2071.931437] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4d1d5241-340b-4c80-a9ef-e05a2ceecaf7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "interface-81b7949d-be24-46c9-8dc8-c249b65bb039-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2071.931861] env[62684]: DEBUG nova.objects.instance [None req-4d1d5241-340b-4c80-a9ef-e05a2ceecaf7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lazy-loading 'flavor' on Instance uuid 81b7949d-be24-46c9-8dc8-c249b65bb039 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2071.950275] env[62684]: DEBUG oslo_vmware.api [None 
req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053212, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2072.836223] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.397s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2072.839404] env[62684]: DEBUG nova.objects.instance [None req-4d1d5241-340b-4c80-a9ef-e05a2ceecaf7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lazy-loading 'pci_requests' on Instance uuid 81b7949d-be24-46c9-8dc8-c249b65bb039 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2072.840510] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2072.840708] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2072.841056] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Deleting the datastore file [datastore2] 26303c0e-be87-41ff-a15c-e92f91f8a05f {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2072.850452] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.049s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2072.850452] env[62684]: INFO nova.compute.claims [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2072.855681] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-07ed8673-f722-4ccb-a72b-1554bacf9e3d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.857724] env[62684]: INFO nova.compute.manager [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Took 20.00 seconds to build instance. 
[ 2072.864227] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053213, 'name': CreateVM_Task, 'duration_secs': 0.294986} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2072.864638] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2072.865091] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2072.865314] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2072.865664] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2072.871746] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d778c21-575f-46ab-809f-af4f54f1fc75 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.874116] env[62684]: DEBUG oslo_vmware.api [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053212, 'name': CreateSnapshot_Task, 'duration_secs': 0.84083} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2072.874352] env[62684]: DEBUG oslo_vmware.api [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Task: {'id': task-2053215, 'name': Rename_Task, 'duration_secs': 0.150533} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2072.876134] env[62684]: INFO nova.scheduler.client.report [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Deleted allocations for instance 41da0c18-dd9c-49bb-8b0d-a907575ee22e [ 2072.877318] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Created Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2072.877607] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2072.878139] env[62684]: DEBUG oslo_vmware.api [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Waiting for the task: (returnval){ [ 2072.878139] env[62684]: value = "task-2053216" [ 2072.878139] env[62684]: _type = "Task" [ 2072.878139] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2072.881185] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0748e4b7-cdf7-4a1f-9364-091c9fbca469 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.884097] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c2c9c53d-33b5-4823-b80e-baf3a2108974 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.889908] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2072.889908] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d5f538-d1dd-eeed-e956-1d2923fe4990" [ 2072.889908] env[62684]: _type = "Task" [ 2072.889908] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2072.899884] env[62684]: DEBUG oslo_vmware.api [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Task: {'id': task-2053216, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2072.903758] env[62684]: DEBUG oslo_vmware.api [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Waiting for the task: (returnval){ [ 2072.903758] env[62684]: value = "task-2053217" [ 2072.903758] env[62684]: _type = "Task" [ 2072.903758] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2072.910241] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d5f538-d1dd-eeed-e956-1d2923fe4990, 'name': SearchDatastore_Task, 'duration_secs': 0.008914} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2072.911374] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2072.911641] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2072.911875] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2072.912121] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2072.912225] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2072.912737] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-be59b19d-64b3-4458-a9fc-e521183a00a8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.917848] env[62684]: DEBUG oslo_vmware.api [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Task: {'id': task-2053217, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2072.925375] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2072.925518] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2072.926284] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85f5deff-cb26-4140-9047-e9aad5fa90b6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.932610] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2072.932610] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526c3742-f928-e70a-ca05-0d771031a379" [ 2072.932610] env[62684]: _type = "Task" [ 2072.932610] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2072.942510] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526c3742-f928-e70a-ca05-0d771031a379, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2073.346424] env[62684]: DEBUG nova.objects.base [None req-4d1d5241-340b-4c80-a9ef-e05a2ceecaf7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Object Instance<81b7949d-be24-46c9-8dc8-c249b65bb039> lazy-loaded attributes: flavor,pci_requests {{(pid=62684) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2073.346775] env[62684]: DEBUG nova.network.neutron [None req-4d1d5241-340b-4c80-a9ef-e05a2ceecaf7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2073.359357] env[62684]: INFO nova.compute.manager [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Rebuilding instance [ 2073.363764] env[62684]: DEBUG oslo_concurrency.lockutils [None req-833d6ddf-4607-4e58-a0d0-24ab9f7a1506 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "983218ac-7cf3-48ef-88d8-aa9e9322df4b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.512s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2073.399482] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9d94250a-62a3-4e66-b394-66b8a8078402 tempest-ImagesTestJSON-303125295 tempest-ImagesTestJSON-303125295-project-member] Lock "41da0c18-dd9c-49bb-8b0d-a907575ee22e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.503s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2073.412184] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Creating linked-clone VM from snapshot {{(pid=62684) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2073.412637] env[62684]: DEBUG oslo_vmware.api [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Task: {'id': task-2053216, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.202693} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2073.415267] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-84d2fb56-645a-424a-8d34-f2b13ef920b1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.421599] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2073.421816] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2073.421997] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2073.422192] env[62684]: INFO nova.compute.manager [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Took 2.29 seconds to destroy the instance on the hypervisor. [ 2073.422430] env[62684]: DEBUG oslo.service.loopingcall [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2073.425216] env[62684]: DEBUG nova.compute.manager [-] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2073.425333] env[62684]: DEBUG nova.network.neutron [-] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2073.427209] env[62684]: DEBUG nova.compute.manager [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2073.428669] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72f43c60-3ae0-4ec8-a4d4-d01706205906 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.435953] env[62684]: DEBUG oslo_vmware.api [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Task: {'id': task-2053217, 'name': PowerOnVM_Task, 'duration_secs': 0.535107} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2073.436260] env[62684]: DEBUG oslo_vmware.api [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2073.436260] env[62684]: value = "task-2053218" [ 2073.436260] env[62684]: _type = "Task" [ 2073.436260] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2073.440033] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2073.440033] env[62684]: INFO nova.compute.manager [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Took 8.71 seconds to spawn the instance on the hypervisor. 
[ 2073.440033] env[62684]: DEBUG nova.compute.manager [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2073.440967] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4d1d5241-340b-4c80-a9ef-e05a2ceecaf7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "interface-81b7949d-be24-46c9-8dc8-c249b65bb039-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.510s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2073.444392] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c653da97-30a4-412e-b6fc-443918a7919b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.464692] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526c3742-f928-e70a-ca05-0d771031a379, 'name': SearchDatastore_Task, 'duration_secs': 0.022649} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2073.467625] env[62684]: DEBUG oslo_vmware.api [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053218, 'name': CloneVM_Task} progress is 11%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2073.468432] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9e75601-226a-4b3b-a657-d64e9b33918c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.475453] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2073.475453] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52fd3768-bb24-14ca-fe5d-d1f3d7b267ef" [ 2073.475453] env[62684]: _type = "Task" [ 2073.475453] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2073.483465] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52fd3768-bb24-14ca-fe5d-d1f3d7b267ef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2073.659981] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Acquiring lock "4cf48f05-d643-47e6-9a0b-33415d80890c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2073.660246] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Lock "4cf48f05-d643-47e6-9a0b-33415d80890c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2073.951470] env[62684]: DEBUG oslo_vmware.api [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053218, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2073.956649] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2073.957459] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8b99d2ff-4c61-4682-a165-4ca8e6b94fd6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.964053] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2073.964053] env[62684]: value = "task-2053219" [ 2073.964053] env[62684]: _type = "Task" [ 2073.964053] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2073.979947] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053219, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2073.984525] env[62684]: INFO nova.compute.manager [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Took 17.02 seconds to build instance. [ 2073.995104] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52fd3768-bb24-14ca-fe5d-d1f3d7b267ef, 'name': SearchDatastore_Task, 'duration_secs': 0.021176} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2073.995465] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2073.995756] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 2f8f7e02-54fb-4275-badb-35c0b840ab33/2f8f7e02-54fb-4275-badb-35c0b840ab33.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2073.996063] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-59d19929-9fc5-4aed-ae00-1e374a491531 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.004873] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2074.004873] env[62684]: value = "task-2053220" [ 2074.004873] env[62684]: _type = "Task" [ 2074.004873] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2074.018591] env[62684]: DEBUG oslo_concurrency.lockutils [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "daf1486b-d5c2-4341-8a27-36eeeb08cd26" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2074.018591] env[62684]: DEBUG oslo_concurrency.lockutils [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "daf1486b-d5c2-4341-8a27-36eeeb08cd26" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2074.020946] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053220, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2074.162321] env[62684]: DEBUG nova.compute.manager [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2074.174478] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d90bf3-bb7a-44bb-beea-bc1346616847 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.186868] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a7bb8e-7a24-420e-bd5a-44fc60877cdc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.217568] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63e7b7ff-1db3-4222-b359-30ff3af8b1bd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.227952] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f37f570-5db5-4a91-89bc-a17e12138183 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.246327] env[62684]: DEBUG nova.compute.provider_tree [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2074.453467] env[62684]: DEBUG oslo_vmware.api [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053218, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2074.474260] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053219, 'name': PowerOffVM_Task, 'duration_secs': 0.345714} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2074.474559] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2074.474802] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2074.475667] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96a76fce-b645-45a9-b551-9880180db943 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.483788] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2074.484291] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-62cae8f2-90b4-41ab-9efa-852c3e6ed086 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.490679] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8dbfb1ec-2e03-4a3f-bf4c-b0a9e1282e37 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Lock "2aac4230-2070-48be-b91a-5cb4218a0574" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.536s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2074.518452] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053220, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2074.525222] env[62684]: DEBUG nova.compute.manager [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2074.539141] env[62684]: DEBUG nova.network.neutron [-] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2074.562441] env[62684]: DEBUG nova.compute.manager [req-94395537-0c89-42b1-8f5e-fae7bbee16d2 req-5534e527-7a5a-4f44-a93a-23694b8af403 service nova] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Received event network-vif-deleted-d011162f-0ef0-4133-ac1b-f7ed8a3a8a3c {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2074.686498] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2074.768189] env[62684]: ERROR nova.scheduler.client.report [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [req-606eb412-46a9-4b2e-85b0-769e4c128121] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-606eb412-46a9-4b2e-85b0-769e4c128121"}]} [ 2074.784503] env[62684]: DEBUG nova.scheduler.client.report [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2074.803734] env[62684]: DEBUG nova.scheduler.client.report [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2074.804052] env[62684]: DEBUG nova.compute.provider_tree [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2074.819511] env[62684]: DEBUG nova.scheduler.client.report [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2074.828489] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2074.828743] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2074.828928] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Deleting the datastore file [datastore1] 
983218ac-7cf3-48ef-88d8-aa9e9322df4b {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2074.829220] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f5fa9460-6776-4012-9062-cd9fd715e49f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.836479] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2074.836479] env[62684]: value = "task-2053222" [ 2074.836479] env[62684]: _type = "Task" [ 2074.836479] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2074.848759] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053222, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2074.850978] env[62684]: DEBUG nova.scheduler.client.report [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2074.951266] env[62684]: DEBUG oslo_vmware.api [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053218, 'name': CloneVM_Task} progress is 95%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2075.022696] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053220, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.538499} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2075.023763] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 2f8f7e02-54fb-4275-badb-35c0b840ab33/2f8f7e02-54fb-4275-badb-35c0b840ab33.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2075.024373] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2075.027689] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-50b206fc-5202-4163-bb24-fb21ae54ee20 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.037795] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2075.037795] env[62684]: value = "task-2053223" [ 2075.037795] env[62684]: _type = "Task" [ 2075.037795] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2075.041221] env[62684]: INFO nova.compute.manager [-] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Took 1.62 seconds to deallocate network for instance. [ 2075.054597] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053223, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2075.055942] env[62684]: DEBUG oslo_concurrency.lockutils [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2075.139799] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda4c9d8-b568-4987-904f-0604faad7e13 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.147562] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dcc10fd-11c8-41c5-9ecf-2fd1aeb37d82 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.179611] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1c8b950-3f53-4d12-a413-e30798cdbf56 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.187878] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f12d8a2f-bbe5-44d1-911f-043f8dbe4b79 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.201830] env[62684]: DEBUG nova.compute.provider_tree [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2075.346564] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053222, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.41116} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2075.346816] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2075.348038] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2075.348038] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2075.453562] env[62684]: DEBUG oslo_vmware.api [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053218, 'name': CloneVM_Task, 'duration_secs': 1.771159} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2075.454533] env[62684]: INFO nova.virt.vmwareapi.vmops [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Created linked-clone VM from snapshot [ 2075.454682] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72518195-b07d-44d0-95af-68ce4d7af45c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.462583] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Uploading image 715beb26-5f3c-47db-931c-9eef95752fd8 {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2075.477044] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Destroying the VM {{(pid=62684) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2075.477044] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c74a7852-3472-4ab4-8d7b-97b818340042 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.483294] env[62684]: DEBUG oslo_vmware.api [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2075.483294] env[62684]: value = "task-2053224" [ 2075.483294] env[62684]: _type = "Task" [ 
2075.483294] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2075.491727] env[62684]: DEBUG oslo_vmware.api [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053224, 'name': Destroy_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2075.507484] env[62684]: DEBUG oslo_concurrency.lockutils [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Acquiring lock "2aac4230-2070-48be-b91a-5cb4218a0574" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2075.507876] env[62684]: DEBUG oslo_concurrency.lockutils [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Lock "2aac4230-2070-48be-b91a-5cb4218a0574" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2075.508222] env[62684]: DEBUG oslo_concurrency.lockutils [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Acquiring lock "2aac4230-2070-48be-b91a-5cb4218a0574-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2075.508434] env[62684]: DEBUG oslo_concurrency.lockutils [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Lock "2aac4230-2070-48be-b91a-5cb4218a0574-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2075.509041] env[62684]: DEBUG oslo_concurrency.lockutils [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Lock "2aac4230-2070-48be-b91a-5cb4218a0574-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2075.511744] env[62684]: INFO nova.compute.manager [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Terminating instance [ 2075.512857] env[62684]: DEBUG nova.compute.manager [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Start destroying the instance on the hypervisor. 
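The "Waiting for the task: (returnval){ ... } to complete" and "progress is N%" records above come from oslo.vmware's task polling. Below is a minimal sketch of that pattern, assuming an already connected oslo_vmware.api.VMwareAPISession and a VM managed-object reference obtained elsewhere; it is an illustration, not the Nova driver code itself.

# Sketch of the oslo.vmware wait_for_task pattern seen above; `session` and `vm_ref`
# are assumed to exist (a connected VMwareAPISession and a VM MoRef).
def destroy_vm(session, vm_ref):
    task = session.invoke_api(session.vim, 'Destroy_Task', vm_ref)
    # wait_for_task polls the task and emits the "progress is N%" DEBUG lines above
    return session.wait_for_task(task)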
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2075.513066] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2075.513931] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-892dfcbe-c60a-4c4e-9a97-68e0900ecabc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.522014] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2075.522282] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-682ecb16-68ea-4d83-b4e2-efa40b9afca6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.528646] env[62684]: DEBUG oslo_vmware.api [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Waiting for the task: (returnval){ [ 2075.528646] env[62684]: value = "task-2053225" [ 2075.528646] env[62684]: _type = "Task" [ 2075.528646] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2075.536719] env[62684]: DEBUG oslo_vmware.api [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Task: {'id': task-2053225, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2075.549675] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053223, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061213} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2075.550433] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2075.551435] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6226a878-7a0f-47f8-805a-f6e8210a7a94 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.556174] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2075.571256] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] 2f8f7e02-54fb-4275-badb-35c0b840ab33/2f8f7e02-54fb-4275-badb-35c0b840ab33.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2075.571670] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-387c5d2f-0845-4506-aa49-672e09a23329 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.586377] env[62684]: DEBUG oslo_concurrency.lockutils [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "interface-81b7949d-be24-46c9-8dc8-c249b65bb039-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2075.586669] env[62684]: DEBUG oslo_concurrency.lockutils [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "interface-81b7949d-be24-46c9-8dc8-c249b65bb039-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2075.586959] env[62684]: DEBUG nova.objects.instance [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lazy-loading 'flavor' on Instance uuid 81b7949d-be24-46c9-8dc8-c249b65bb039 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2075.594203] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2075.594203] env[62684]: value = "task-2053226" [ 2075.594203] env[62684]: _type = "Task" 
[ 2075.594203] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2075.602742] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053226, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2075.708173] env[62684]: DEBUG nova.scheduler.client.report [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2075.995765] env[62684]: DEBUG oslo_vmware.api [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053224, 'name': Destroy_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2076.046409] env[62684]: DEBUG oslo_vmware.api [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Task: {'id': task-2053225, 'name': PowerOffVM_Task, 'duration_secs': 0.172943} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2076.046409] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2076.046675] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2076.046992] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c744b5cb-7a64-4c35-b757-868f633f3d48 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.105184] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053226, 'name': ReconfigVM_Task, 'duration_secs': 0.335047} completed successfully. 
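The provider inventory logged above is a plain mapping per resource class. As an illustration only, with the values copied from that log record and a helper function that is ours rather than Placement code, the usable capacity derived from such a record is (total - reserved) * allocation_ratio.

# Inventory values copied from the log record above; the helper is illustrative only.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def capacity(rec):
    # Placement treats (total - reserved) * allocation_ratio as schedulable capacity.
    return (rec['total'] - rec['reserved']) * rec['allocation_ratio']

for rc, rec in inventory.items():
    print(rc, capacity(rec))   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0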
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2076.105184] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Reconfigured VM instance instance-0000004b to attach disk [datastore2] 2f8f7e02-54fb-4275-badb-35c0b840ab33/2f8f7e02-54fb-4275-badb-35c0b840ab33.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2076.105184] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2d2430e4-b25c-4279-b3aa-5b72e657e9a3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.111568] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2076.111568] env[62684]: value = "task-2053228" [ 2076.111568] env[62684]: _type = "Task" [ 2076.111568] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2076.120036] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053228, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2076.201168] env[62684]: DEBUG nova.objects.instance [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lazy-loading 'pci_requests' on Instance uuid 81b7949d-be24-46c9-8dc8-c249b65bb039 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2076.213196] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.364s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2076.213906] env[62684]: DEBUG nova.compute.manager [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2076.216381] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.530s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2076.218191] env[62684]: INFO nova.compute.claims [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2076.293816] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2076.294078] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2076.294271] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Deleting the datastore file [datastore1] 2aac4230-2070-48be-b91a-5cb4218a0574 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2076.294866] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d366b077-c317-405b-a060-ef12cec850f7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.301591] env[62684]: DEBUG oslo_vmware.api [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Waiting for the task: (returnval){ [ 2076.301591] env[62684]: value = "task-2053229" [ 2076.301591] env[62684]: _type = "Task" [ 2076.301591] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2076.309673] env[62684]: DEBUG oslo_vmware.api [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Task: {'id': task-2053229, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2076.390758] env[62684]: DEBUG nova.virt.hardware [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2076.391044] env[62684]: DEBUG nova.virt.hardware [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2076.391213] env[62684]: DEBUG nova.virt.hardware [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2076.391407] env[62684]: DEBUG nova.virt.hardware [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2076.391560] env[62684]: DEBUG nova.virt.hardware [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2076.391711] env[62684]: DEBUG nova.virt.hardware [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2076.392166] env[62684]: DEBUG nova.virt.hardware [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2076.392360] env[62684]: DEBUG nova.virt.hardware [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
2076.392541] env[62684]: DEBUG nova.virt.hardware [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2076.392714] env[62684]: DEBUG nova.virt.hardware [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2076.392892] env[62684]: DEBUG nova.virt.hardware [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2076.394111] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d8a9df-6f59-475a-b8f2-92aaa7cfb02b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.402903] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17db10fd-7e6a-4ce1-8f64-92afb1e2a7ad {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.417526] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6c:54:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'de5fcb06-b0d0-467f-86fe-06882165ac31', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b5e1d42c-c9c1-4c43-83b7-81eac7065383', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2076.425368] env[62684]: DEBUG oslo.service.loopingcall [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2076.425629] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2076.425846] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3fe27aeb-2940-40da-8081-649884697795 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.443823] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2076.443823] env[62684]: value = "task-2053230" [ 2076.443823] env[62684]: _type = "Task" [ 2076.443823] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2076.453900] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053230, 'name': CreateVM_Task} progress is 0%. 
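The nova.virt.hardware records above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") amount to enumerating sockets/cores/threads factorizations of the vCPU count within the logged limits. A rough sketch of that enumeration follows; it is not the actual nova.virt.hardware implementation.

# Rough sketch of "possible topologies": every sockets*cores*threads factorization of
# the vCPU count that stays within the limits; for 1 vCPU the only answer is 1:1:1.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    found.append((sockets, cores, threads))
    return found

print(possible_topologies(1))   # [(1, 1, 1)]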
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2076.492991] env[62684]: DEBUG oslo_vmware.api [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053224, 'name': Destroy_Task, 'duration_secs': 0.945707} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2076.493322] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Destroyed the VM [ 2076.493587] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Deleting Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2076.493868] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-28cbddc3-a1e4-4944-b419-3db141d43fdb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.500380] env[62684]: DEBUG oslo_vmware.api [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2076.500380] env[62684]: value = "task-2053231" [ 2076.500380] env[62684]: _type = "Task" [ 2076.500380] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2076.508453] env[62684]: DEBUG oslo_vmware.api [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053231, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2076.622214] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053228, 'name': Rename_Task, 'duration_secs': 0.137229} completed successfully. 
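The "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" record above is emitted from oslo.service's loopingcall module. The sketch below shows the generic FixedIntervalLoopingCall pattern from that module; the exact wrapper Nova uses at that call site may differ, and the poll function and interval here are hypothetical.

# Generic oslo.service looping-call sketch (hypothetical poll function, assumed interval).
from oslo_service import loopingcall

def _poll_once():
    # a real poller would check some condition; here we finish on the first call
    raise loopingcall.LoopingCallDone(retvalue='done')

timer = loopingcall.FixedIntervalLoopingCall(_poll_once)
print(timer.start(interval=0.5).wait())   # 'done'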
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2076.622639] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2076.623056] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4025fdf2-6f3f-48eb-b8de-ae846b1a6f6a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.630292] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2076.630292] env[62684]: value = "task-2053232" [ 2076.630292] env[62684]: _type = "Task" [ 2076.630292] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2076.639983] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053232, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2076.704413] env[62684]: DEBUG nova.objects.base [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Object Instance<81b7949d-be24-46c9-8dc8-c249b65bb039> lazy-loaded attributes: flavor,pci_requests {{(pid=62684) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2076.704666] env[62684]: DEBUG nova.network.neutron [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2076.723131] env[62684]: DEBUG nova.compute.utils [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2076.726878] env[62684]: DEBUG nova.compute.manager [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2076.727586] env[62684]: DEBUG nova.network.neutron [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2076.750506] env[62684]: DEBUG nova.policy [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e957449ae9d24bdaba38b3db704d3d61', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5cb4900a999e467bafdfd1fb407a82f4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2076.816259] env[62684]: DEBUG oslo_vmware.api [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Task: {'id': task-2053229, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145793} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2076.817717] env[62684]: DEBUG nova.policy [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6e8b54745b53458eafe4d911d7d6d7d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c54f74085f343d2b790145b0d82a9f8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2076.819754] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2076.820015] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2076.820838] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2076.821063] env[62684]: INFO nova.compute.manager [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 
tempest-ServerAddressesTestJSON-737568951-project-member] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Took 1.31 seconds to destroy the instance on the hypervisor. [ 2076.821325] env[62684]: DEBUG oslo.service.loopingcall [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2076.821549] env[62684]: DEBUG nova.compute.manager [-] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2076.821640] env[62684]: DEBUG nova.network.neutron [-] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2076.962780] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053230, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.010881] env[62684]: DEBUG oslo_vmware.api [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053231, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.149653] env[62684]: DEBUG oslo_vmware.api [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053232, 'name': PowerOnVM_Task, 'duration_secs': 0.449615} completed successfully. 
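The "Policy check for network:attach_external_network failed with credentials {...}" records above are ordinary oslo.policy checks evaluated against the request context's roles. A minimal standalone sketch of such a check follows; the rule string 'role:admin' and the setup are assumptions for illustration, not Nova's actual policy defaults.

# Standalone oslo.policy sketch; the rule string and credentials are illustrative only.
from oslo_config import cfg
from oslo_policy import policy

CONF = cfg.CONF
CONF([], project='example')                      # initialize an empty config

enforcer = policy.Enforcer(CONF)
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

creds = {'roles': ['member', 'reader']}          # roles taken from the log record above
print(enforcer.enforce('network:attach_external_network', {}, creds))  # False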
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2077.149653] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2077.149653] env[62684]: DEBUG nova.compute.manager [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2077.150073] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee5fdda6-84fd-4520-ab3a-7878762d09dd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.221428] env[62684]: DEBUG nova.network.neutron [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Successfully created port: 9833c404-668e-4109-a2eb-c4b18c1fa92c {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2077.228092] env[62684]: DEBUG nova.compute.manager [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2077.300434] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2077.301110] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2077.433907] env[62684]: DEBUG nova.network.neutron [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Successfully created port: ff6434b2-d91e-43cc-b6f8-03cec921c38d {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2077.461046] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053230, 'name': CreateVM_Task, 'duration_secs': 0.555591} completed successfully. 
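The "Running periodic task ComputeManager._check_instance_build_time / _sync_scheduler_instance_info" records above come from oslo.service's periodic task machinery. A minimal sketch of that machinery follows; the manager class and task body are ours, not the real ComputeManager.

# Minimal oslo.service periodic-task sketch; the manager class and task body are ours.
from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF
CONF([], project='example')

class ManagerSketch(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task                 # default spacing: run on every pass
    def _check_instance_build_time(self, context):
        pass                                     # real task inspects instances stuck in BUILD

mgr = ManagerSketch()
mgr.run_periodic_tasks(context=None)             # logs "Running periodic task ..." at DEBUG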
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2077.461046] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2077.464983] env[62684]: DEBUG oslo_concurrency.lockutils [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2077.465757] env[62684]: DEBUG oslo_concurrency.lockutils [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2077.465757] env[62684]: DEBUG oslo_concurrency.lockutils [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2077.468626] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31881e31-e98b-4cab-8a57-044ccc255dce {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.474473] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2077.474473] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5294ea63-e1d9-71f3-deb0-b83628ea2c50" [ 2077.474473] env[62684]: _type = "Task" [ 2077.474473] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.482660] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5294ea63-e1d9-71f3-deb0-b83628ea2c50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.512714] env[62684]: DEBUG oslo_vmware.api [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053231, 'name': RemoveSnapshot_Task, 'duration_secs': 0.680821} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2077.513182] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Deleted Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2077.563611] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f41d43-cf48-48e8-b55e-05f0c68e2ed0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.573502] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83279038-51d8-4eb5-80e1-c32872e8aecd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.612229] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1af7321f-db9a-4bc9-9059-bd53e813c0e4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.616976] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd9e94db-bae5-4a1d-882e-6cf31ae783f9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.631842] env[62684]: DEBUG nova.compute.provider_tree [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2077.671612] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2077.787882] env[62684]: DEBUG nova.compute.manager [req-4b96f16f-353e-4b0b-b4eb-d1e4162d2d7d req-645d4e08-0cd0-496e-883a-d70b052b0219 service nova] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Received event network-vif-deleted-868206c0-a9c5-4e53-a798-830df2d0c619 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2077.787882] env[62684]: INFO nova.compute.manager [req-4b96f16f-353e-4b0b-b4eb-d1e4162d2d7d req-645d4e08-0cd0-496e-883a-d70b052b0219 service nova] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Neutron deleted interface 868206c0-a9c5-4e53-a798-830df2d0c619; detaching it from the instance and deleting it from the info cache [ 2077.787882] env[62684]: DEBUG nova.network.neutron [req-4b96f16f-353e-4b0b-b4eb-d1e4162d2d7d req-645d4e08-0cd0-496e-883a-d70b052b0219 service nova] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2077.814407] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task 
ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2077.814598] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2077.925246] env[62684]: DEBUG nova.network.neutron [-] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2077.988067] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5294ea63-e1d9-71f3-deb0-b83628ea2c50, 'name': SearchDatastore_Task, 'duration_secs': 0.042655} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2077.989387] env[62684]: DEBUG oslo_concurrency.lockutils [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2077.989387] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2077.989577] env[62684]: DEBUG oslo_concurrency.lockutils [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2077.989577] env[62684]: DEBUG oslo_concurrency.lockutils [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2077.989859] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2077.990305] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-71a4e931-9a5a-4730-8e46-63879886180f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.998547] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2077.998734] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2077.999485] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91895c28-001c-4e20-925c-240dece6ab43 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.005604] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2078.005604] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5263bb85-3ec3-8fd9-09ca-c4e2a5c04d50" [ 2078.005604] env[62684]: _type = "Task" [ 2078.005604] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2078.014169] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5263bb85-3ec3-8fd9-09ca-c4e2a5c04d50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.017405] env[62684]: WARNING nova.compute.manager [None req-f5a00295-cbb5-4284-9ff2-5c74e1e6aafc tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Image not found during snapshot: nova.exception.ImageNotFound: Image 715beb26-5f3c-47db-931c-9eef95752fd8 could not be found. [ 2078.135181] env[62684]: DEBUG nova.scheduler.client.report [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2078.240987] env[62684]: DEBUG nova.compute.manager [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2078.270396] env[62684]: DEBUG nova.virt.hardware [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2078.270660] env[62684]: DEBUG nova.virt.hardware [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2078.270821] env[62684]: DEBUG nova.virt.hardware [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2078.271017] env[62684]: DEBUG nova.virt.hardware [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2078.271173] env[62684]: DEBUG nova.virt.hardware [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2078.271352] env[62684]: DEBUG nova.virt.hardware [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2078.271585] env[62684]: DEBUG nova.virt.hardware [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2078.271751] env[62684]: DEBUG nova.virt.hardware [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2078.271921] env[62684]: DEBUG nova.virt.hardware [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 
tempest-ServersTestJSON-828328252-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2078.272104] env[62684]: DEBUG nova.virt.hardware [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2078.272288] env[62684]: DEBUG nova.virt.hardware [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2078.273161] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f885184c-4c81-47a0-a094-824198006e0e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.281413] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d05c89c9-21b1-4d51-9f8b-88262ebc6996 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.295051] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c8c5778a-2c94-4fcf-912a-b78041768556 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.303666] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca0759ed-70c7-49e1-a357-6821e9317752 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.337111] env[62684]: DEBUG nova.compute.manager [req-4b96f16f-353e-4b0b-b4eb-d1e4162d2d7d req-645d4e08-0cd0-496e-883a-d70b052b0219 service nova] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Detach interface failed, port_id=868206c0-a9c5-4e53-a798-830df2d0c619, reason: Instance 2aac4230-2070-48be-b91a-5cb4218a0574 could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2078.427442] env[62684]: INFO nova.compute.manager [-] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Took 1.61 seconds to deallocate network for instance. 
[ 2078.478425] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquiring lock "2f8f7e02-54fb-4275-badb-35c0b840ab33" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2078.478712] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Lock "2f8f7e02-54fb-4275-badb-35c0b840ab33" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2078.479413] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquiring lock "2f8f7e02-54fb-4275-badb-35c0b840ab33-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2078.479621] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Lock "2f8f7e02-54fb-4275-badb-35c0b840ab33-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2078.479795] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Lock "2f8f7e02-54fb-4275-badb-35c0b840ab33-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2078.481675] env[62684]: INFO nova.compute.manager [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Terminating instance [ 2078.483240] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquiring lock "refresh_cache-2f8f7e02-54fb-4275-badb-35c0b840ab33" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2078.483427] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquired lock "refresh_cache-2f8f7e02-54fb-4275-badb-35c0b840ab33" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2078.483613] env[62684]: DEBUG nova.network.neutron [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Building network info cache for 
instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2078.515835] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5263bb85-3ec3-8fd9-09ca-c4e2a5c04d50, 'name': SearchDatastore_Task, 'duration_secs': 0.025026} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2078.516690] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bebe6d4-350b-41f6-968a-452d79266f74 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.522906] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2078.522906] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e26868-6fcc-6d5d-85a6-606b11af5713" [ 2078.522906] env[62684]: _type = "Task" [ 2078.522906] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2078.531528] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e26868-6fcc-6d5d-85a6-606b11af5713, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.640753] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.424s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2078.641328] env[62684]: DEBUG nova.compute.manager [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2078.644048] env[62684]: DEBUG oslo_concurrency.lockutils [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.588s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2078.645543] env[62684]: INFO nova.compute.claims [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2078.692214] env[62684]: DEBUG oslo_concurrency.lockutils [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquiring lock "845b2e2a-cee0-4598-afbd-1f07aa52468f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2078.692491] env[62684]: DEBUG oslo_concurrency.lockutils [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "845b2e2a-cee0-4598-afbd-1f07aa52468f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2078.692713] env[62684]: DEBUG oslo_concurrency.lockutils [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquiring lock "845b2e2a-cee0-4598-afbd-1f07aa52468f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2078.692898] env[62684]: DEBUG oslo_concurrency.lockutils [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "845b2e2a-cee0-4598-afbd-1f07aa52468f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2078.693095] env[62684]: DEBUG oslo_concurrency.lockutils [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "845b2e2a-cee0-4598-afbd-1f07aa52468f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2078.699646] env[62684]: INFO nova.compute.manager [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Terminating instance [ 2078.701466] env[62684]: 
DEBUG nova.compute.manager [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2078.701676] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2078.702553] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f838b448-5457-4a8d-aa58-9e7fd2d1d5da {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.713722] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2078.714148] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9293bfeb-4513-4742-aad9-abd5f03cae34 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.724049] env[62684]: DEBUG oslo_vmware.api [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2078.724049] env[62684]: value = "task-2053233" [ 2078.724049] env[62684]: _type = "Task" [ 2078.724049] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2078.736284] env[62684]: DEBUG oslo_vmware.api [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053233, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.819659] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Didn't find any instances for network info cache update. 
{{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 2078.819916] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2078.820097] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2078.820259] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2078.820476] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2078.820661] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2078.820826] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2078.820963] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2078.821143] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2078.915520] env[62684]: DEBUG nova.compute.manager [req-d1528117-f049-4bb7-8233-357460398773 req-a6c302e7-1d7c-4720-b964-40c4b0653114 service nova] [instance: 23578214-6708-43ae-88ce-56212083532a] Received event network-vif-plugged-9833c404-668e-4109-a2eb-c4b18c1fa92c {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2078.915520] env[62684]: DEBUG oslo_concurrency.lockutils [req-d1528117-f049-4bb7-8233-357460398773 req-a6c302e7-1d7c-4720-b964-40c4b0653114 service nova] Acquiring lock "23578214-6708-43ae-88ce-56212083532a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2078.915520] env[62684]: DEBUG oslo_concurrency.lockutils [req-d1528117-f049-4bb7-8233-357460398773 req-a6c302e7-1d7c-4720-b964-40c4b0653114 service nova] Lock "23578214-6708-43ae-88ce-56212083532a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2078.915520] env[62684]: DEBUG oslo_concurrency.lockutils [req-d1528117-f049-4bb7-8233-357460398773 req-a6c302e7-1d7c-4720-b964-40c4b0653114 service nova] Lock "23578214-6708-43ae-88ce-56212083532a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2078.917582] env[62684]: DEBUG nova.compute.manager [req-d1528117-f049-4bb7-8233-357460398773 req-a6c302e7-1d7c-4720-b964-40c4b0653114 service nova] [instance: 23578214-6708-43ae-88ce-56212083532a] No waiting events found dispatching network-vif-plugged-9833c404-668e-4109-a2eb-c4b18c1fa92c {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2078.917582] env[62684]: WARNING nova.compute.manager [req-d1528117-f049-4bb7-8233-357460398773 req-a6c302e7-1d7c-4720-b964-40c4b0653114 service nova] [instance: 23578214-6708-43ae-88ce-56212083532a] Received unexpected event network-vif-plugged-9833c404-668e-4109-a2eb-c4b18c1fa92c for instance with vm_state building and task_state spawning. [ 2078.935330] env[62684]: DEBUG oslo_concurrency.lockutils [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2079.007919] env[62684]: DEBUG nova.network.neutron [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2079.036428] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e26868-6fcc-6d5d-85a6-606b11af5713, 'name': SearchDatastore_Task, 'duration_secs': 0.019734} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2079.037502] env[62684]: DEBUG oslo_concurrency.lockutils [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2079.039315] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 983218ac-7cf3-48ef-88d8-aa9e9322df4b/983218ac-7cf3-48ef-88d8-aa9e9322df4b.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2079.039315] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e368b27a-2c76-4939-a48e-d5e2616e7f09 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.046921] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2079.046921] env[62684]: value = "task-2053234" [ 2079.046921] env[62684]: _type = "Task" [ 2079.046921] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2079.056536] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053234, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2079.093418] env[62684]: DEBUG nova.network.neutron [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Successfully updated port: 9833c404-668e-4109-a2eb-c4b18c1fa92c {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2079.103770] env[62684]: DEBUG nova.network.neutron [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2079.151496] env[62684]: DEBUG nova.compute.utils [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2079.164113] env[62684]: DEBUG nova.compute.manager [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Not allocating networking since 'none' was specified. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 2079.234237] env[62684]: DEBUG oslo_vmware.api [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053233, 'name': PowerOffVM_Task, 'duration_secs': 0.170085} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2079.234699] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2079.235091] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2079.235904] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9b0629d0-bc53-448e-83a6-4a3718c8a4fa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.245602] env[62684]: DEBUG nova.network.neutron [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Successfully updated port: ff6434b2-d91e-43cc-b6f8-03cec921c38d {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2079.325550] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2079.563323] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053234, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2079.572091] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2079.572268] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2079.573145] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Deleting the datastore file [datastore1] 845b2e2a-cee0-4598-afbd-1f07aa52468f {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2079.573145] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-df2ba25c-195b-4ebe-be2b-c2e5f7bf7db5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.580536] env[62684]: DEBUG oslo_vmware.api [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2079.580536] env[62684]: value = "task-2053236" [ 2079.580536] env[62684]: _type = "Task" [ 2079.580536] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2079.594661] env[62684]: DEBUG oslo_vmware.api [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053236, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2079.597154] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "refresh_cache-23578214-6708-43ae-88ce-56212083532a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2079.597397] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquired lock "refresh_cache-23578214-6708-43ae-88ce-56212083532a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2079.597507] env[62684]: DEBUG nova.network.neutron [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2079.606750] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Releasing lock "refresh_cache-2f8f7e02-54fb-4275-badb-35c0b840ab33" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2079.607273] env[62684]: DEBUG nova.compute.manager [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2079.607522] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2079.608640] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a77dc055-d32d-4b8d-8d4c-ef50e6ec9eca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.618135] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2079.618489] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-67f0cfb4-a78d-425a-806c-721614f976e7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.627562] env[62684]: DEBUG oslo_vmware.api [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2079.627562] env[62684]: value = "task-2053237" [ 2079.627562] env[62684]: _type = "Task" [ 2079.627562] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2079.636413] env[62684]: DEBUG oslo_vmware.api [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053237, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2079.664708] env[62684]: DEBUG nova.compute.manager [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2079.748984] env[62684]: DEBUG oslo_concurrency.lockutils [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "refresh_cache-81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2079.748984] env[62684]: DEBUG oslo_concurrency.lockutils [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "refresh_cache-81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2079.749183] env[62684]: DEBUG nova.network.neutron [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2079.993470] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1914af2-88e3-49ee-971a-838c9cb37d93 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.006435] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-152da32f-c635-45f7-b512-c0758a970466 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.042124] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b5f0272-0629-470d-983d-6f1bdb3dc69a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.045816] env[62684]: DEBUG nova.compute.manager [req-d750679f-97be-4afe-9ba7-9a8127395bcc req-132dccc9-226a-4a43-866c-acd8051d60f8 service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Received event network-vif-plugged-ff6434b2-d91e-43cc-b6f8-03cec921c38d {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2080.046048] env[62684]: DEBUG oslo_concurrency.lockutils [req-d750679f-97be-4afe-9ba7-9a8127395bcc req-132dccc9-226a-4a43-866c-acd8051d60f8 service nova] Acquiring lock "81b7949d-be24-46c9-8dc8-c249b65bb039-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2080.046270] 
env[62684]: DEBUG oslo_concurrency.lockutils [req-d750679f-97be-4afe-9ba7-9a8127395bcc req-132dccc9-226a-4a43-866c-acd8051d60f8 service nova] Lock "81b7949d-be24-46c9-8dc8-c249b65bb039-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2080.046445] env[62684]: DEBUG oslo_concurrency.lockutils [req-d750679f-97be-4afe-9ba7-9a8127395bcc req-132dccc9-226a-4a43-866c-acd8051d60f8 service nova] Lock "81b7949d-be24-46c9-8dc8-c249b65bb039-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2080.046616] env[62684]: DEBUG nova.compute.manager [req-d750679f-97be-4afe-9ba7-9a8127395bcc req-132dccc9-226a-4a43-866c-acd8051d60f8 service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] No waiting events found dispatching network-vif-plugged-ff6434b2-d91e-43cc-b6f8-03cec921c38d {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2080.046781] env[62684]: WARNING nova.compute.manager [req-d750679f-97be-4afe-9ba7-9a8127395bcc req-132dccc9-226a-4a43-866c-acd8051d60f8 service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Received unexpected event network-vif-plugged-ff6434b2-d91e-43cc-b6f8-03cec921c38d for instance with vm_state active and task_state None. [ 2080.046942] env[62684]: DEBUG nova.compute.manager [req-d750679f-97be-4afe-9ba7-9a8127395bcc req-132dccc9-226a-4a43-866c-acd8051d60f8 service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Received event network-changed-ff6434b2-d91e-43cc-b6f8-03cec921c38d {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2080.047146] env[62684]: DEBUG nova.compute.manager [req-d750679f-97be-4afe-9ba7-9a8127395bcc req-132dccc9-226a-4a43-866c-acd8051d60f8 service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Refreshing instance network info cache due to event network-changed-ff6434b2-d91e-43cc-b6f8-03cec921c38d. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2080.047445] env[62684]: DEBUG oslo_concurrency.lockutils [req-d750679f-97be-4afe-9ba7-9a8127395bcc req-132dccc9-226a-4a43-866c-acd8051d60f8 service nova] Acquiring lock "refresh_cache-81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2080.055523] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-896bca77-9d0d-4e85-85e9-16a8ae5e4d09 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.062875] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053234, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.597243} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2080.063576] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 983218ac-7cf3-48ef-88d8-aa9e9322df4b/983218ac-7cf3-48ef-88d8-aa9e9322df4b.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2080.063832] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2080.064101] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d3300acb-166d-45c2-a019-720b6192d146 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.075683] env[62684]: DEBUG nova.compute.provider_tree [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2080.086323] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2080.086323] env[62684]: value = "task-2053238" [ 2080.086323] env[62684]: _type = "Task" [ 2080.086323] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2080.098546] env[62684]: DEBUG oslo_vmware.api [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053236, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.20326} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2080.099440] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2080.100187] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2080.100187] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2080.100187] env[62684]: INFO nova.compute.manager [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Took 1.40 seconds to destroy the instance on the hypervisor. [ 2080.100469] env[62684]: DEBUG oslo.service.loopingcall [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2080.103197] env[62684]: DEBUG nova.compute.manager [-] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2080.103302] env[62684]: DEBUG nova.network.neutron [-] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2080.108521] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053238, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2080.138324] env[62684]: DEBUG oslo_vmware.api [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053237, 'name': PowerOffVM_Task, 'duration_secs': 0.114748} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2080.139253] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2080.140021] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2080.140021] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-82dd74eb-cd67-448c-ad1f-c88f35ede407 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.164393] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2080.164393] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2080.164480] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Deleting the datastore file [datastore2] 2f8f7e02-54fb-4275-badb-35c0b840ab33 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2080.164709] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8a8d14b1-f3ea-41ab-bb4b-abe65f7d9b3e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.171117] env[62684]: DEBUG oslo_vmware.api [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2080.171117] env[62684]: value = "task-2053240" [ 2080.171117] env[62684]: _type = "Task" [ 2080.171117] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2080.181023] env[62684]: DEBUG nova.network.neutron [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2080.190616] env[62684]: DEBUG oslo_vmware.api [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053240, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2080.303308] env[62684]: WARNING nova.network.neutron [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] bbb78a3c-6804-4aae-9107-4ae6699c305d already exists in list: networks containing: ['bbb78a3c-6804-4aae-9107-4ae6699c305d']. ignoring it [ 2080.564119] env[62684]: DEBUG nova.network.neutron [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Updating instance_info_cache with network_info: [{"id": "9833c404-668e-4109-a2eb-c4b18c1fa92c", "address": "fa:16:3e:63:3a:c6", "network": {"id": "aa52badb-0b73-48bc-afaa-5e06a97d5c7d", "bridge": "br-int", "label": "tempest-ServersTestJSON-556342067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c54f74085f343d2b790145b0d82a9f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9833c404-66", "ovs_interfaceid": "9833c404-668e-4109-a2eb-c4b18c1fa92c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2080.580465] env[62684]: DEBUG nova.scheduler.client.report [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2080.598433] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053238, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087496} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2080.598606] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2080.599477] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3562d9cc-d1cf-4853-84eb-bad8fc440c5c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.630675] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] 983218ac-7cf3-48ef-88d8-aa9e9322df4b/983218ac-7cf3-48ef-88d8-aa9e9322df4b.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2080.631429] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8752d6e1-2b7c-4218-be88-7f4ce93fe6af {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.655303] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2080.655303] env[62684]: value = "task-2053241" [ 2080.655303] env[62684]: _type = "Task" [ 2080.655303] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2080.664631] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053241, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2080.690464] env[62684]: DEBUG nova.compute.manager [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2080.693120] env[62684]: DEBUG oslo_vmware.api [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053240, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.090156} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2080.693912] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2080.694044] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2080.694297] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2080.694509] env[62684]: INFO nova.compute.manager [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Took 1.09 seconds to destroy the instance on the hypervisor. [ 2080.694789] env[62684]: DEBUG oslo.service.loopingcall [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2080.696220] env[62684]: DEBUG nova.network.neutron [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Updating instance_info_cache with network_info: [{"id": "fafc2062-9754-4ce0-8647-362b6bb8f8d7", "address": "fa:16:3e:a1:7d:89", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfafc2062-97", "ovs_interfaceid": "fafc2062-9754-4ce0-8647-362b6bb8f8d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ff6434b2-d91e-43cc-b6f8-03cec921c38d", "address": "fa:16:3e:88:3a:fb", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", 
"bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff6434b2-d9", "ovs_interfaceid": "ff6434b2-d91e-43cc-b6f8-03cec921c38d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2080.697346] env[62684]: DEBUG nova.compute.manager [-] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2080.702470] env[62684]: DEBUG nova.network.neutron [-] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2080.723385] env[62684]: DEBUG nova.network.neutron [-] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2080.728959] env[62684]: DEBUG nova.virt.hardware [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2080.728959] env[62684]: DEBUG nova.virt.hardware [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2080.728959] env[62684]: DEBUG nova.virt.hardware [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2080.728959] env[62684]: DEBUG nova.virt.hardware [None 
req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2080.728959] env[62684]: DEBUG nova.virt.hardware [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2080.728959] env[62684]: DEBUG nova.virt.hardware [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2080.728959] env[62684]: DEBUG nova.virt.hardware [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2080.728959] env[62684]: DEBUG nova.virt.hardware [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2080.728959] env[62684]: DEBUG nova.virt.hardware [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2080.728959] env[62684]: DEBUG nova.virt.hardware [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2080.728959] env[62684]: DEBUG nova.virt.hardware [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2080.730101] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce60b70-c5f6-4913-8843-91756f05636c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.739814] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe3d5bb3-85b7-4393-8db9-771a2fdcc071 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.754963] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Instance VIF info [] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2080.760723] env[62684]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Creating folder: Project (5940678d2d50482ab9f927dd4d1b37c1). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2080.765117] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0325af7a-d547-45aa-8d47-e29a77527a2a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.776178] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Created folder: Project (5940678d2d50482ab9f927dd4d1b37c1) in parent group-v421118. [ 2080.776406] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Creating folder: Instances. Parent ref: group-v421336. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2080.776662] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5607f170-4a09-4a1c-9135-ec537ca9523f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.785891] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Created folder: Instances in parent group-v421336. [ 2080.786552] env[62684]: DEBUG oslo.service.loopingcall [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2080.786552] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2080.786552] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-36460216-f35a-4629-bde8-cf7bee5ed971 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.802485] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2080.802485] env[62684]: value = "task-2053244" [ 2080.802485] env[62684]: _type = "Task" [ 2080.802485] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2080.810188] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053244, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.068903] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Releasing lock "refresh_cache-23578214-6708-43ae-88ce-56212083532a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2081.069265] env[62684]: DEBUG nova.compute.manager [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Instance network_info: |[{"id": "9833c404-668e-4109-a2eb-c4b18c1fa92c", "address": "fa:16:3e:63:3a:c6", "network": {"id": "aa52badb-0b73-48bc-afaa-5e06a97d5c7d", "bridge": "br-int", "label": "tempest-ServersTestJSON-556342067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c54f74085f343d2b790145b0d82a9f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9833c404-66", "ovs_interfaceid": "9833c404-668e-4109-a2eb-c4b18c1fa92c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2081.069737] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:3a:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1fb81f98-6f5a-47ab-a512-27277591d064', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9833c404-668e-4109-a2eb-c4b18c1fa92c', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2081.083537] env[62684]: DEBUG oslo.service.loopingcall [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2081.083537] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23578214-6708-43ae-88ce-56212083532a] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2081.083537] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f94e8eb-d5d3-4ae6-976e-a3cd8e5e54bd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.099185] env[62684]: DEBUG oslo_concurrency.lockutils [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.455s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2081.099718] env[62684]: DEBUG nova.compute.manager [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2081.102405] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.546s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2081.102631] env[62684]: DEBUG nova.objects.instance [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Lazy-loading 'resources' on Instance uuid 26303c0e-be87-41ff-a15c-e92f91f8a05f {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2081.108499] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2081.108499] env[62684]: value = "task-2053245" [ 2081.108499] env[62684]: _type = "Task" [ 2081.108499] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2081.116610] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053245, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.167204] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053241, 'name': ReconfigVM_Task, 'duration_secs': 0.363339} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2081.168822] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Reconfigured VM instance instance-0000004c to attach disk [datastore2] 983218ac-7cf3-48ef-88d8-aa9e9322df4b/983218ac-7cf3-48ef-88d8-aa9e9322df4b.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2081.169898] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e0574f1c-7694-480d-930b-64dc59f50aa7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.178176] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2081.178176] env[62684]: value = "task-2053246" [ 2081.178176] env[62684]: _type = "Task" [ 2081.178176] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2081.181274] env[62684]: DEBUG nova.network.neutron [-] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2081.189686] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053246, 'name': Rename_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.205174] env[62684]: DEBUG oslo_concurrency.lockutils [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "refresh_cache-81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2081.205174] env[62684]: DEBUG oslo_concurrency.lockutils [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2081.205174] env[62684]: DEBUG oslo_concurrency.lockutils [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2081.205373] env[62684]: DEBUG oslo_concurrency.lockutils [req-d750679f-97be-4afe-9ba7-9a8127395bcc req-132dccc9-226a-4a43-866c-acd8051d60f8 service nova] Acquired lock "refresh_cache-81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2081.205484] env[62684]: DEBUG nova.network.neutron [req-d750679f-97be-4afe-9ba7-9a8127395bcc req-132dccc9-226a-4a43-866c-acd8051d60f8 service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Refreshing network info cache for port ff6434b2-d91e-43cc-b6f8-03cec921c38d {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2081.207146] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6309e771-6d21-454b-8e8c-bdc0a3f0897a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.226709] env[62684]: DEBUG nova.virt.hardware [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2081.226709] env[62684]: DEBUG nova.virt.hardware [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2081.226709] env[62684]: DEBUG nova.virt.hardware [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 
tempest-AttachInterfacesTestJSON-207820228-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2081.226709] env[62684]: DEBUG nova.virt.hardware [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2081.226709] env[62684]: DEBUG nova.virt.hardware [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2081.226709] env[62684]: DEBUG nova.virt.hardware [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2081.226709] env[62684]: DEBUG nova.virt.hardware [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2081.227532] env[62684]: DEBUG nova.virt.hardware [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2081.227532] env[62684]: DEBUG nova.virt.hardware [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2081.227532] env[62684]: DEBUG nova.virt.hardware [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2081.227532] env[62684]: DEBUG nova.virt.hardware [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2081.237031] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Reconfiguring VM to attach interface {{(pid=62684) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 2081.237031] env[62684]: DEBUG nova.network.neutron [-] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 2081.237031] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07dba794-31c3-43d4-9e66-b7d8a17779a2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.255311] env[62684]: DEBUG oslo_vmware.api [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2081.255311] env[62684]: value = "task-2053247" [ 2081.255311] env[62684]: _type = "Task" [ 2081.255311] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2081.263970] env[62684]: DEBUG oslo_vmware.api [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053247, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.312210] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053244, 'name': CreateVM_Task, 'duration_secs': 0.296374} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2081.312563] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2081.315863] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2081.315863] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2081.315863] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2081.315863] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65262e79-f868-4fa8-bbd4-196fe1d12920 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.318348] env[62684]: DEBUG oslo_vmware.api [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Waiting for the task: (returnval){ [ 2081.318348] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5284cad6-99f9-572f-23ac-55188c673efc" [ 2081.318348] env[62684]: _type = "Task" [ 2081.318348] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2081.328050] env[62684]: DEBUG oslo_vmware.api [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5284cad6-99f9-572f-23ac-55188c673efc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.606307] env[62684]: DEBUG nova.compute.utils [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2081.610648] env[62684]: DEBUG nova.compute.manager [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2081.610819] env[62684]: DEBUG nova.network.neutron [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2081.623640] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053245, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.670693] env[62684]: DEBUG nova.policy [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b4de13d501bc4b8aaa78b8153b766921', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e57b232a7e7647c7a3b2bca3c096feb7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2081.688370] env[62684]: INFO nova.compute.manager [-] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Took 1.58 seconds to deallocate network for instance. [ 2081.688750] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053246, 'name': Rename_Task, 'duration_secs': 0.165423} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2081.692696] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2081.693392] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1ffe5cdf-ee3e-4b51-be66-b08fcc505b28 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.702728] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2081.702728] env[62684]: value = "task-2053248" [ 2081.702728] env[62684]: _type = "Task" [ 2081.702728] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2081.715892] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053248, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.751346] env[62684]: INFO nova.compute.manager [-] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Took 1.05 seconds to deallocate network for instance. [ 2081.768220] env[62684]: DEBUG oslo_vmware.api [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053247, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.839308] env[62684]: DEBUG oslo_vmware.api [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5284cad6-99f9-572f-23ac-55188c673efc, 'name': SearchDatastore_Task, 'duration_secs': 0.012236} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2081.839308] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2081.839308] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2081.839308] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2081.839308] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2081.839308] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2081.839679] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-517e33a7-3b7d-42c2-822b-13c6746a74b9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.854195] env[62684]: DEBUG nova.compute.manager [req-0f078d41-7de3-40b0-b76c-2f83a51f9d0b req-1ecf0ac3-5a00-4eea-8694-b0ee67cc2cd3 service nova] [instance: 23578214-6708-43ae-88ce-56212083532a] Received event network-changed-9833c404-668e-4109-a2eb-c4b18c1fa92c {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2081.854348] env[62684]: DEBUG nova.compute.manager [req-0f078d41-7de3-40b0-b76c-2f83a51f9d0b req-1ecf0ac3-5a00-4eea-8694-b0ee67cc2cd3 service nova] [instance: 23578214-6708-43ae-88ce-56212083532a] Refreshing instance network info cache due to event network-changed-9833c404-668e-4109-a2eb-c4b18c1fa92c. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2081.854904] env[62684]: DEBUG oslo_concurrency.lockutils [req-0f078d41-7de3-40b0-b76c-2f83a51f9d0b req-1ecf0ac3-5a00-4eea-8694-b0ee67cc2cd3 service nova] Acquiring lock "refresh_cache-23578214-6708-43ae-88ce-56212083532a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2081.855086] env[62684]: DEBUG oslo_concurrency.lockutils [req-0f078d41-7de3-40b0-b76c-2f83a51f9d0b req-1ecf0ac3-5a00-4eea-8694-b0ee67cc2cd3 service nova] Acquired lock "refresh_cache-23578214-6708-43ae-88ce-56212083532a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2081.855269] env[62684]: DEBUG nova.network.neutron [req-0f078d41-7de3-40b0-b76c-2f83a51f9d0b req-1ecf0ac3-5a00-4eea-8694-b0ee67cc2cd3 service nova] [instance: 23578214-6708-43ae-88ce-56212083532a] Refreshing network info cache for port 9833c404-668e-4109-a2eb-c4b18c1fa92c {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2081.858180] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2081.858180] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2081.859740] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36862fed-aa61-48b1-adfc-a9e5a7fac478 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.868700] env[62684]: DEBUG oslo_vmware.api [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Waiting for the task: (returnval){ [ 2081.868700] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b345c1-aeb4-7aa2-80ae-7cca0591a249" [ 2081.868700] env[62684]: _type = "Task" [ 2081.868700] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2081.884623] env[62684]: DEBUG oslo_vmware.api [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b345c1-aeb4-7aa2-80ae-7cca0591a249, 'name': SearchDatastore_Task, 'duration_secs': 0.008777} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2081.888681] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64e52f05-0450-4cad-85f8-1dc4b3d5c729 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.895049] env[62684]: DEBUG oslo_vmware.api [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Waiting for the task: (returnval){ [ 2081.895049] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52df8ac1-d286-f551-71ef-78df3e2ba22a" [ 2081.895049] env[62684]: _type = "Task" [ 2081.895049] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2081.907163] env[62684]: DEBUG oslo_vmware.api [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52df8ac1-d286-f551-71ef-78df3e2ba22a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.965926] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25eb2489-82e7-4501-899c-06d72aa1a46b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.974444] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dbc51ec-8d99-4eef-a584-0dfee78cdf02 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.011484] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5597a560-0690-4c63-8764-b43e8c753c8d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.022904] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e1f20c-2ee0-45de-8fa6-65c707377195 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.034521] env[62684]: DEBUG nova.compute.provider_tree [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2082.068499] env[62684]: DEBUG nova.network.neutron [req-d750679f-97be-4afe-9ba7-9a8127395bcc req-132dccc9-226a-4a43-866c-acd8051d60f8 service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Updated VIF entry in instance network info cache for port ff6434b2-d91e-43cc-b6f8-03cec921c38d. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2082.068965] env[62684]: DEBUG nova.network.neutron [req-d750679f-97be-4afe-9ba7-9a8127395bcc req-132dccc9-226a-4a43-866c-acd8051d60f8 service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Updating instance_info_cache with network_info: [{"id": "fafc2062-9754-4ce0-8647-362b6bb8f8d7", "address": "fa:16:3e:a1:7d:89", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfafc2062-97", "ovs_interfaceid": "fafc2062-9754-4ce0-8647-362b6bb8f8d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ff6434b2-d91e-43cc-b6f8-03cec921c38d", "address": "fa:16:3e:88:3a:fb", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff6434b2-d9", "ovs_interfaceid": "ff6434b2-d91e-43cc-b6f8-03cec921c38d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2082.112317] env[62684]: DEBUG nova.compute.manager [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2082.128941] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053245, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.200034] env[62684]: DEBUG oslo_concurrency.lockutils [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2082.212871] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053248, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.265316] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2082.269233] env[62684]: DEBUG oslo_vmware.api [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053247, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.415307] env[62684]: DEBUG oslo_vmware.api [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52df8ac1-d286-f551-71ef-78df3e2ba22a, 'name': SearchDatastore_Task, 'duration_secs': 0.022702} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2082.415934] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2082.417374] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 4cf48f05-d643-47e6-9a0b-33415d80890c/4cf48f05-d643-47e6-9a0b-33415d80890c.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2082.418702] env[62684]: DEBUG nova.compute.manager [req-c4421aae-8575-49c7-8106-5b1236f30234 req-b7433339-6212-4cad-8507-a9f046460e1c service nova] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Received event network-vif-deleted-d01b5a43-4c06-4869-b3d4-b610699f6bb1 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2082.419683] env[62684]: DEBUG nova.network.neutron [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Successfully created port: da310d7c-cd12-49ca-8014-efa9469aef45 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2082.422024] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0f871772-59e6-453f-94e4-7df6864a2115 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.431378] env[62684]: DEBUG oslo_vmware.api [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Waiting for the task: (returnval){ [ 2082.431378] env[62684]: value = "task-2053249" [ 2082.431378] env[62684]: _type = "Task" [ 2082.431378] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2082.440643] env[62684]: DEBUG oslo_vmware.api [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053249, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.538662] env[62684]: DEBUG nova.scheduler.client.report [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2082.572157] env[62684]: DEBUG oslo_concurrency.lockutils [req-d750679f-97be-4afe-9ba7-9a8127395bcc req-132dccc9-226a-4a43-866c-acd8051d60f8 service nova] Releasing lock "refresh_cache-81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2082.639031] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053245, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.721017] env[62684]: DEBUG oslo_vmware.api [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053248, 'name': PowerOnVM_Task, 'duration_secs': 0.784988} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2082.721017] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2082.721017] env[62684]: DEBUG nova.compute.manager [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2082.721017] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c548aa8-d0d4-433a-8736-9347a6e6f5ee {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.767542] env[62684]: DEBUG nova.network.neutron [req-0f078d41-7de3-40b0-b76c-2f83a51f9d0b req-1ecf0ac3-5a00-4eea-8694-b0ee67cc2cd3 service nova] [instance: 23578214-6708-43ae-88ce-56212083532a] Updated VIF entry in instance network info cache for port 9833c404-668e-4109-a2eb-c4b18c1fa92c. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2082.767970] env[62684]: DEBUG nova.network.neutron [req-0f078d41-7de3-40b0-b76c-2f83a51f9d0b req-1ecf0ac3-5a00-4eea-8694-b0ee67cc2cd3 service nova] [instance: 23578214-6708-43ae-88ce-56212083532a] Updating instance_info_cache with network_info: [{"id": "9833c404-668e-4109-a2eb-c4b18c1fa92c", "address": "fa:16:3e:63:3a:c6", "network": {"id": "aa52badb-0b73-48bc-afaa-5e06a97d5c7d", "bridge": "br-int", "label": "tempest-ServersTestJSON-556342067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c54f74085f343d2b790145b0d82a9f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9833c404-66", "ovs_interfaceid": "9833c404-668e-4109-a2eb-c4b18c1fa92c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2082.779984] env[62684]: DEBUG oslo_vmware.api [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053247, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.942615] env[62684]: DEBUG oslo_vmware.api [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053249, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.053315] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.951s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2083.055825] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 5.384s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2083.056420] env[62684]: DEBUG nova.objects.instance [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62684) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2083.084808] env[62684]: INFO nova.scheduler.client.report [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Deleted allocations for instance 26303c0e-be87-41ff-a15c-e92f91f8a05f [ 2083.125483] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053245, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.130106] env[62684]: DEBUG nova.compute.manager [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2083.161412] env[62684]: DEBUG nova.virt.hardware [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2083.161412] env[62684]: DEBUG nova.virt.hardware [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2083.161412] env[62684]: DEBUG nova.virt.hardware [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2083.161670] env[62684]: DEBUG nova.virt.hardware [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2083.161670] env[62684]: DEBUG nova.virt.hardware [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2083.161815] env[62684]: DEBUG nova.virt.hardware [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2083.165022] env[62684]: DEBUG nova.virt.hardware [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2083.165022] env[62684]: DEBUG nova.virt.hardware [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 2083.165022] env[62684]: DEBUG nova.virt.hardware [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2083.165022] env[62684]: DEBUG nova.virt.hardware [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2083.165022] env[62684]: DEBUG nova.virt.hardware [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2083.165022] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d15076d9-4dfc-4f76-ba72-e9a9f4247164 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.174847] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11eddca0-a8ef-4419-af3d-03738a2639eb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.243119] env[62684]: DEBUG oslo_concurrency.lockutils [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2083.270624] env[62684]: DEBUG oslo_vmware.api [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053247, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.280351] env[62684]: DEBUG oslo_concurrency.lockutils [req-0f078d41-7de3-40b0-b76c-2f83a51f9d0b req-1ecf0ac3-5a00-4eea-8694-b0ee67cc2cd3 service nova] Releasing lock "refresh_cache-23578214-6708-43ae-88ce-56212083532a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2083.445345] env[62684]: DEBUG oslo_vmware.api [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053249, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.533402} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2083.445345] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 4cf48f05-d643-47e6-9a0b-33415d80890c/4cf48f05-d643-47e6-9a0b-33415d80890c.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2083.445345] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2083.445345] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-df8c1800-673a-4f25-a381-bad1593bb372 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.450908] env[62684]: DEBUG oslo_vmware.api [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Waiting for the task: (returnval){ [ 2083.450908] env[62684]: value = "task-2053250" [ 2083.450908] env[62684]: _type = "Task" [ 2083.450908] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2083.458695] env[62684]: DEBUG oslo_vmware.api [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053250, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.599408] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b9de025c-517b-4a69-961d-49a684c89246 tempest-ServersV294TestFqdnHostnames-1811751678 tempest-ServersV294TestFqdnHostnames-1811751678-project-member] Lock "26303c0e-be87-41ff-a15c-e92f91f8a05f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.471s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2083.627463] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053245, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.775540] env[62684]: DEBUG oslo_vmware.api [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053247, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.960505] env[62684]: DEBUG oslo_vmware.api [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053250, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068138} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2083.960739] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2083.961555] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebc57e2a-5a5a-4222-96d0-854be17b7716 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.980620] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] 4cf48f05-d643-47e6-9a0b-33415d80890c/4cf48f05-d643-47e6-9a0b-33415d80890c.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2083.980912] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7c51d8e-ab0a-4328-b3b8-008c894e6d96 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.001906] env[62684]: DEBUG oslo_vmware.api [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Waiting for the task: (returnval){ [ 2084.001906] env[62684]: value = "task-2053251" [ 2084.001906] env[62684]: _type = "Task" [ 2084.001906] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2084.010095] env[62684]: DEBUG oslo_vmware.api [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053251, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.067556] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a306f669-af88-436a-b55a-651c14a2616a tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2084.068859] env[62684]: DEBUG oslo_concurrency.lockutils [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.134s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2084.069034] env[62684]: DEBUG nova.objects.instance [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Lazy-loading 'resources' on Instance uuid 2aac4230-2070-48be-b91a-5cb4218a0574 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2084.129947] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053245, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.275028] env[62684]: DEBUG oslo_vmware.api [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053247, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.512159] env[62684]: DEBUG oslo_vmware.api [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053251, 'name': ReconfigVM_Task, 'duration_secs': 0.25648} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2084.512520] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Reconfigured VM instance instance-0000004f to attach disk [datastore1] 4cf48f05-d643-47e6-9a0b-33415d80890c/4cf48f05-d643-47e6-9a0b-33415d80890c.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2084.513096] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bf012b88-1a60-457e-a22c-f53ddf23cba6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.520415] env[62684]: DEBUG oslo_vmware.api [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Waiting for the task: (returnval){ [ 2084.520415] env[62684]: value = "task-2053252" [ 2084.520415] env[62684]: _type = "Task" [ 2084.520415] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2084.528609] env[62684]: DEBUG oslo_vmware.api [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053252, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.630717] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053245, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.773464] env[62684]: DEBUG oslo_vmware.api [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053247, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.801131] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-631daad5-1a12-450d-b8a0-303173d1e2da {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.808961] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee62b8d8-e046-4c8c-9a2b-9841271ee384 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.839182] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d22294c-a147-429a-b1d3-aae082b1d090 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.846745] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-524200c3-7d5d-48ad-884d-0550b1278562 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.860628] env[62684]: DEBUG nova.compute.provider_tree [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2085.041827] env[62684]: DEBUG oslo_vmware.api [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053252, 'name': Rename_Task, 'duration_secs': 0.14041} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2085.042945] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2085.043496] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9c500e98-7db9-4316-824d-68f5740520e5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.054016] env[62684]: DEBUG oslo_vmware.api [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Waiting for the task: (returnval){ [ 2085.054016] env[62684]: value = "task-2053253" [ 2085.054016] env[62684]: _type = "Task" [ 2085.054016] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2085.064996] env[62684]: DEBUG oslo_vmware.api [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053253, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.129450] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053245, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.275577] env[62684]: DEBUG oslo_vmware.api [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053247, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.366298] env[62684]: DEBUG nova.scheduler.client.report [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2085.461765] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "983218ac-7cf3-48ef-88d8-aa9e9322df4b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2085.461765] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "983218ac-7cf3-48ef-88d8-aa9e9322df4b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2085.461765] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "983218ac-7cf3-48ef-88d8-aa9e9322df4b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2085.461765] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "983218ac-7cf3-48ef-88d8-aa9e9322df4b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2085.461765] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "983218ac-7cf3-48ef-88d8-aa9e9322df4b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2085.463160] env[62684]: INFO nova.compute.manager [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Terminating instance [ 2085.465376] env[62684]: DEBUG nova.compute.manager [None 
req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2085.465717] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2085.466704] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c18e73-7b7b-43ab-b8af-edc98e64a624 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.481176] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2085.481176] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-10a4b23d-f21a-4486-85df-389a68cc29f1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.488203] env[62684]: DEBUG oslo_vmware.api [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2085.488203] env[62684]: value = "task-2053254" [ 2085.488203] env[62684]: _type = "Task" [ 2085.488203] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2085.497936] env[62684]: DEBUG oslo_vmware.api [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053254, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.562838] env[62684]: DEBUG oslo_vmware.api [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053253, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.629812] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053245, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.777213] env[62684]: DEBUG oslo_vmware.api [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053247, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.870580] env[62684]: DEBUG oslo_concurrency.lockutils [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.802s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2085.873104] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 6.548s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2085.873297] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2085.873480] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2085.873940] env[62684]: DEBUG oslo_concurrency.lockutils [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.674s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2085.874013] env[62684]: DEBUG nova.objects.instance [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lazy-loading 'resources' on Instance uuid 845b2e2a-cee0-4598-afbd-1f07aa52468f {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2085.875878] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb5d6ce-41ee-4e5c-877d-5c7b4d47c564 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.885895] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0dd3384-a253-4f9e-92ba-74ac800d23b8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.903890] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-783de09f-75b2-456f-873d-361eacc588c7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.908915] env[62684]: INFO nova.scheduler.client.report [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Deleted allocations for instance 2aac4230-2070-48be-b91a-5cb4218a0574 [ 2085.912646] env[62684]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca0ea1a-a172-4c92-961f-cf297fbb7748 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.952031] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178393MB free_disk=154GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2085.952031] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2085.998126] env[62684]: DEBUG oslo_vmware.api [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053254, 'name': PowerOffVM_Task, 'duration_secs': 0.192665} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2085.998398] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2085.998597] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2085.999145] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f58c8c3b-7715-43b7-9f91-ee320a55146e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.062989] env[62684]: DEBUG oslo_vmware.api [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053253, 'name': PowerOnVM_Task, 'duration_secs': 0.605579} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2086.063291] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2086.063506] env[62684]: INFO nova.compute.manager [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Took 5.37 seconds to spawn the instance on the hypervisor. 
[ 2086.063778] env[62684]: DEBUG nova.compute.manager [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2086.064459] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c784254-e587-40c3-8a8e-2d2b741b3694 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.130343] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053245, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.281452] env[62684]: DEBUG oslo_vmware.api [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053247, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.429557] env[62684]: DEBUG oslo_concurrency.lockutils [None req-36ce61a2-0bd4-4a0b-ab78-65f49c086f51 tempest-ServerAddressesTestJSON-737568951 tempest-ServerAddressesTestJSON-737568951-project-member] Lock "2aac4230-2070-48be-b91a-5cb4218a0574" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.921s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2086.569125] env[62684]: DEBUG oslo_concurrency.lockutils [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "28886f7c-6efc-4505-84f6-682d75cea215" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2086.569414] env[62684]: DEBUG oslo_concurrency.lockutils [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "28886f7c-6efc-4505-84f6-682d75cea215" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2086.584749] env[62684]: INFO nova.compute.manager [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Took 11.92 seconds to build instance. [ 2086.634831] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053245, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.673532] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebef784d-11c9-4716-9dc2-ed6ac6170252 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.681682] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd3b18f2-6591-422c-a3ff-b9fc622d050a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.718124] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1846818b-6b7d-41a5-a7d7-7827d07469c4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.726606] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b37b5c8f-b20e-4de0-b1f9-2350a85e8f15 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.741305] env[62684]: DEBUG nova.compute.provider_tree [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2086.781509] env[62684]: DEBUG oslo_vmware.api [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053247, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2087.075022] env[62684]: DEBUG nova.compute.manager [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2087.087944] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f750711b-d0a5-462b-8834-3bebcff46824 tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Lock "4cf48f05-d643-47e6-9a0b-33415d80890c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.427s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2087.137892] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053245, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2087.244589] env[62684]: DEBUG nova.scheduler.client.report [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2087.283978] env[62684]: DEBUG oslo_vmware.api [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053247, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2087.522222] env[62684]: INFO nova.compute.manager [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Rebuilding instance [ 2087.573866] env[62684]: DEBUG nova.compute.manager [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2087.575111] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c8d4a3e-27b4-408d-b2db-2d26eb65028b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.603804] env[62684]: DEBUG oslo_concurrency.lockutils [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2087.639999] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053245, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2087.752222] env[62684]: DEBUG oslo_concurrency.lockutils [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.876s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2087.753395] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.490s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2087.754052] env[62684]: DEBUG nova.objects.instance [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Lazy-loading 'resources' on Instance uuid 2f8f7e02-54fb-4275-badb-35c0b840ab33 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2087.769552] env[62684]: INFO nova.scheduler.client.report [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Deleted allocations for instance 845b2e2a-cee0-4598-afbd-1f07aa52468f [ 2087.783212] env[62684]: DEBUG oslo_vmware.api [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053247, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2088.100727] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2088.101659] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4fbe3b12-51ab-4de3-81f2-04e4a39b14b6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.109734] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Waiting for the task: (returnval){ [ 2088.109734] env[62684]: value = "task-2053256" [ 2088.109734] env[62684]: _type = "Task" [ 2088.109734] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2088.126962] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053256, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2088.137808] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053245, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2088.281639] env[62684]: DEBUG oslo_concurrency.lockutils [None req-23c10594-4f41-4e61-891d-92fb0c22d11b tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "845b2e2a-cee0-4598-afbd-1f07aa52468f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.589s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2088.286639] env[62684]: DEBUG oslo_vmware.api [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053247, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2088.500278] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e399eb1d-77cc-4e15-8fe0-f8ed68581864 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.507308] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b93bc67a-a9f2-4cb6-908f-e15e1b44b2f6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.538855] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46f98ffc-dbbe-4ac5-a2ba-181821069769 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.546216] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495b3d7f-aafe-4881-99a3-70c167301e2a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.559781] env[62684]: DEBUG nova.compute.provider_tree [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2088.619310] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053256, 'name': PowerOffVM_Task, 'duration_secs': 0.122476} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2088.619600] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2088.619879] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2088.620575] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87a77192-c3cf-498b-90ae-aaed6ad5dd9a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.628088] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2088.634555] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-92963d2e-7088-450a-b583-4bb1f3d37c62 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.647479] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053245, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2088.665652] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2088.665873] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2088.666078] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Deleting the datastore file [datastore1] 4cf48f05-d643-47e6-9a0b-33415d80890c {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2088.666349] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-75c0519a-fc87-49f9-bd62-6a00a6481bb0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.673064] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Waiting for the task: (returnval){ [ 2088.673064] env[62684]: value = "task-2053258" [ 2088.673064] env[62684]: _type = "Task" [ 2088.673064] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2088.680617] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053258, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2088.789321] env[62684]: DEBUG oslo_vmware.api [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053247, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2088.798374] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2088.798374] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2088.798374] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Deleting the datastore file [datastore2] 983218ac-7cf3-48ef-88d8-aa9e9322df4b {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2088.798648] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-20d310e3-2144-4572-99d6-dd49a460e97f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.806313] env[62684]: DEBUG oslo_vmware.api [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2088.806313] env[62684]: value = "task-2053259" [ 2088.806313] env[62684]: _type = "Task" [ 2088.806313] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2088.815560] env[62684]: DEBUG oslo_vmware.api [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053259, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2089.065346] env[62684]: DEBUG nova.scheduler.client.report [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2089.144144] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053245, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2089.188967] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053258, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174271} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2089.189725] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2089.190101] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2089.190839] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2089.287130] env[62684]: DEBUG oslo_vmware.api [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053247, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2089.322567] env[62684]: DEBUG oslo_vmware.api [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053259, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.21766} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2089.323311] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2089.323658] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2089.324197] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2089.325363] env[62684]: INFO nova.compute.manager [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Took 3.86 seconds to destroy the instance on the hypervisor. [ 2089.325493] env[62684]: DEBUG oslo.service.loopingcall [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2089.325867] env[62684]: DEBUG nova.compute.manager [-] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2089.326087] env[62684]: DEBUG nova.network.neutron [-] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2089.573741] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.819s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2089.573741] env[62684]: DEBUG oslo_concurrency.lockutils [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 6.331s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2089.574286] env[62684]: DEBUG nova.objects.instance [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62684) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2089.599585] env[62684]: INFO nova.scheduler.client.report [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Deleted allocations for instance 2f8f7e02-54fb-4275-badb-35c0b840ab33 [ 2089.643185] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053245, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2089.785582] env[62684]: DEBUG oslo_vmware.api [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053247, 'name': ReconfigVM_Task, 'duration_secs': 8.112117} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2089.786134] env[62684]: DEBUG oslo_concurrency.lockutils [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2089.790020] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Reconfigured VM to attach interface {{(pid=62684) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 2090.108608] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b0fc5e07-8882-4be2-aaea-054d3d003737 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Lock "2f8f7e02-54fb-4275-badb-35c0b840ab33" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.630s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2090.145087] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053245, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2090.250240] env[62684]: DEBUG nova.virt.hardware [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2090.250841] env[62684]: DEBUG nova.virt.hardware [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2090.250841] env[62684]: DEBUG nova.virt.hardware [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2090.250841] env[62684]: DEBUG nova.virt.hardware [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2090.251232] env[62684]: DEBUG nova.virt.hardware [None 
req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2090.251232] env[62684]: DEBUG nova.virt.hardware [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2090.251499] env[62684]: DEBUG nova.virt.hardware [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2090.251499] env[62684]: DEBUG nova.virt.hardware [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2090.252107] env[62684]: DEBUG nova.virt.hardware [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2090.252107] env[62684]: DEBUG nova.virt.hardware [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2090.252107] env[62684]: DEBUG nova.virt.hardware [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2090.252885] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5230bfc-122d-46e3-8b1b-c50ab30be253 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.257992] env[62684]: DEBUG nova.compute.manager [req-0992dffa-897d-4f74-9e7a-f3d76b231585 req-a82198f9-6fb3-4079-91bc-72649d2fbb1a service nova] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Received event network-vif-deleted-b5e1d42c-c9c1-4c43-83b7-81eac7065383 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2090.258264] env[62684]: INFO nova.compute.manager [req-0992dffa-897d-4f74-9e7a-f3d76b231585 req-a82198f9-6fb3-4079-91bc-72649d2fbb1a service nova] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Neutron deleted interface b5e1d42c-c9c1-4c43-83b7-81eac7065383; detaching it from the instance and deleting it from the info cache [ 2090.258522] env[62684]: DEBUG nova.network.neutron [req-0992dffa-897d-4f74-9e7a-f3d76b231585 req-a82198f9-6fb3-4079-91bc-72649d2fbb1a service nova] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Updating instance_info_cache with network_info: [] {{(pid=62684) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2090.265448] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23c8d0a1-619d-4278-a208-8a7170c6022b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.284359] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Instance VIF info [] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2090.292553] env[62684]: DEBUG oslo.service.loopingcall [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2090.294958] env[62684]: DEBUG oslo_concurrency.lockutils [None req-681dfe97-ed3f-4523-9bea-7120ac643530 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "interface-81b7949d-be24-46c9-8dc8-c249b65bb039-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 14.707s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2090.295207] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2090.295494] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4fd797e3-d6e5-4be9-9552-d2222172b832 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.316949] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2090.316949] env[62684]: value = "task-2053260" [ 2090.316949] env[62684]: _type = "Task" [ 2090.316949] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2090.327102] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053260, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2090.430793] env[62684]: DEBUG nova.network.neutron [-] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2090.586018] env[62684]: DEBUG oslo_concurrency.lockutils [None req-672db0ae-ed5d-43bf-988a-0e9c69ed6f09 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2090.587156] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 4.636s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2090.641805] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053245, 'name': CreateVM_Task, 'duration_secs': 9.070338} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2090.641979] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23578214-6708-43ae-88ce-56212083532a] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2090.642749] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2090.642987] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2090.643339] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2090.643614] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-537e6f6c-2408-4802-9c2d-985f64adb290 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.650726] env[62684]: DEBUG oslo_vmware.api [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2090.650726] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b6cce7-5e0d-3349-616b-10a0b4b33036" [ 2090.650726] env[62684]: _type = "Task" [ 2090.650726] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2090.653812] env[62684]: DEBUG nova.network.neutron [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Successfully updated port: da310d7c-cd12-49ca-8014-efa9469aef45 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2090.658652] env[62684]: DEBUG oslo_vmware.api [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b6cce7-5e0d-3349-616b-10a0b4b33036, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2090.761429] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-07c6525d-9303-47b4-97d2-1a509f08b76a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.772951] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2753eee7-8a6b-44d7-b3d3-baa1f5da0f35 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.806328] env[62684]: DEBUG nova.compute.manager [req-0992dffa-897d-4f74-9e7a-f3d76b231585 req-a82198f9-6fb3-4079-91bc-72649d2fbb1a service nova] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Detach interface failed, port_id=b5e1d42c-c9c1-4c43-83b7-81eac7065383, reason: Instance 983218ac-7cf3-48ef-88d8-aa9e9322df4b could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2090.826975] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053260, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2090.934355] env[62684]: INFO nova.compute.manager [-] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Took 1.61 seconds to deallocate network for instance. 
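Editor's note: the lock traffic in the entries above ("Acquiring lock" / "Acquired lock" / "Releasing lock" around the "[datastore1] devstack-image-cache_base/..." entries, all pointing into oslo_concurrency/lockutils.py) comes from oslo.concurrency's named locks, which serialise concurrent builds that share one cached image. The sketch below shows that locking idiom in miniature; it is an illustration under stated assumptions, not Nova's actual image-cache code, and image_in_cache()/fetch_image_to_cache() are hypothetical placeholders for the SearchDatastore_Task and copy steps the driver really performs.

from oslo_concurrency import lockutils


def image_in_cache(cache_key):
    # Placeholder: the real driver issues a SearchDatastore_Task here.
    return False


def fetch_image_to_cache(cache_key, image_id):
    # Placeholder: the real driver copies the image VMDK into the cache dir.
    pass


def ensure_image_cached(image_id, datastore='datastore1'):
    # Named-lock key shaped like the "[datastore1] devstack-image-cache_base/<id>"
    # entries in the log. lockutils.lock() emits the "Acquiring lock" /
    # "Releasing lock" DEBUG lines while the lock is held, so only one build
    # at a time checks or populates a given cache entry.
    cache_key = '[%s] devstack-image-cache_base/%s' % (datastore, image_id)
    with lockutils.lock(cache_key):
        if not image_in_cache(cache_key):
            fetch_image_to_cache(cache_key, image_id)
    return cache_key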
[ 2091.160782] env[62684]: DEBUG oslo_concurrency.lockutils [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "refresh_cache-daf1486b-d5c2-4341-8a27-36eeeb08cd26" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2091.160953] env[62684]: DEBUG oslo_concurrency.lockutils [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquired lock "refresh_cache-daf1486b-d5c2-4341-8a27-36eeeb08cd26" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2091.161133] env[62684]: DEBUG nova.network.neutron [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2091.162235] env[62684]: DEBUG oslo_vmware.api [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b6cce7-5e0d-3349-616b-10a0b4b33036, 'name': SearchDatastore_Task, 'duration_secs': 0.009232} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2091.166567] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2091.166814] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2091.167187] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2091.167468] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2091.167696] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2091.168049] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-717de0cb-67ee-4ca9-90b6-5e5fd126eb54 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.178551] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2091.178778] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2091.179799] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-340365cc-a007-4bb6-840f-dcd25a0f75bf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.187937] env[62684]: DEBUG oslo_vmware.api [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2091.187937] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529679ac-6204-6e4d-5cf3-fb18a50471ae" [ 2091.187937] env[62684]: _type = "Task" [ 2091.187937] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2091.196146] env[62684]: DEBUG oslo_vmware.api [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529679ac-6204-6e4d-5cf3-fb18a50471ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2091.328643] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053260, 'name': CreateVM_Task, 'duration_secs': 0.517588} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2091.328871] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2091.329364] env[62684]: DEBUG oslo_concurrency.lockutils [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2091.329828] env[62684]: DEBUG oslo_concurrency.lockutils [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2091.330235] env[62684]: DEBUG oslo_concurrency.lockutils [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2091.330803] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1775f909-d5ba-40e1-858e-5b8e6086ca83 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.336730] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Waiting for the task: (returnval){ [ 2091.336730] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52491c5c-8134-3890-b15d-bfc854bdf450" [ 2091.336730] env[62684]: _type = "Task" [ 2091.336730] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2091.345757] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52491c5c-8134-3890-b15d-bfc854bdf450, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2091.441640] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2091.520670] env[62684]: DEBUG nova.compute.manager [req-19f055c0-34ad-44a5-bb88-5dbf6b3c29f9 req-ec2d7763-db0f-4d5a-9db2-a148ae03017c service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Received event network-vif-plugged-da310d7c-cd12-49ca-8014-efa9469aef45 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2091.520911] env[62684]: DEBUG oslo_concurrency.lockutils [req-19f055c0-34ad-44a5-bb88-5dbf6b3c29f9 req-ec2d7763-db0f-4d5a-9db2-a148ae03017c service nova] Acquiring lock "daf1486b-d5c2-4341-8a27-36eeeb08cd26-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2091.521339] env[62684]: DEBUG oslo_concurrency.lockutils [req-19f055c0-34ad-44a5-bb88-5dbf6b3c29f9 req-ec2d7763-db0f-4d5a-9db2-a148ae03017c service nova] Lock "daf1486b-d5c2-4341-8a27-36eeeb08cd26-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2091.521597] env[62684]: DEBUG oslo_concurrency.lockutils [req-19f055c0-34ad-44a5-bb88-5dbf6b3c29f9 req-ec2d7763-db0f-4d5a-9db2-a148ae03017c service nova] Lock "daf1486b-d5c2-4341-8a27-36eeeb08cd26-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2091.522219] env[62684]: DEBUG nova.compute.manager [req-19f055c0-34ad-44a5-bb88-5dbf6b3c29f9 req-ec2d7763-db0f-4d5a-9db2-a148ae03017c service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] No waiting events found dispatching network-vif-plugged-da310d7c-cd12-49ca-8014-efa9469aef45 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2091.522714] env[62684]: WARNING nova.compute.manager [req-19f055c0-34ad-44a5-bb88-5dbf6b3c29f9 req-ec2d7763-db0f-4d5a-9db2-a148ae03017c service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Received unexpected event network-vif-plugged-da310d7c-cd12-49ca-8014-efa9469aef45 for instance with vm_state building and task_state spawning. [ 2091.642958] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 025dfe36-1f14-4bda-84a0-d424364b745b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2091.642958] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2091.642958] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance b1f70e39-bf37-4fb8-b95b-653b59bec265 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2091.642958] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance ca3d1a73-6f3b-4278-8fe7-03b66f407ba6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2091.642958] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 57537508-06e7-43a4-95c5-c4399b8bf93f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2091.642958] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 0156d807-1ab4-482f-91d1-172bf32bf23c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2091.642958] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 2baabe7a-ed33-4cef-9acc-a7b804610b0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2091.642958] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 81b7949d-be24-46c9-8dc8-c249b65bb039 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2091.642958] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 42ae6edd-e1f5-4ef8-a248-8f02e94d798e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2091.642958] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 9964237b-db9b-49cc-a9bd-d62329ea564e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2091.642958] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 983218ac-7cf3-48ef-88d8-aa9e9322df4b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2091.642958] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 23578214-6708-43ae-88ce-56212083532a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2091.642958] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 4cf48f05-d643-47e6-9a0b-33415d80890c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2091.642958] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance daf1486b-d5c2-4341-8a27-36eeeb08cd26 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2091.702231] env[62684]: DEBUG oslo_vmware.api [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529679ac-6204-6e4d-5cf3-fb18a50471ae, 'name': SearchDatastore_Task, 'duration_secs': 0.009318} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2091.703892] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e87c92dc-cd83-4eff-9c2a-3bc8b78e49ca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.710052] env[62684]: DEBUG oslo_vmware.api [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2091.710052] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523f89fa-5876-9911-6fe9-726018d5ee6b" [ 2091.710052] env[62684]: _type = "Task" [ 2091.710052] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2091.720122] env[62684]: DEBUG oslo_vmware.api [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523f89fa-5876-9911-6fe9-726018d5ee6b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2091.721475] env[62684]: DEBUG nova.network.neutron [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2091.852545] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52491c5c-8134-3890-b15d-bfc854bdf450, 'name': SearchDatastore_Task, 'duration_secs': 0.010093} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2091.853310] env[62684]: DEBUG oslo_concurrency.lockutils [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2091.853832] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2091.854224] env[62684]: DEBUG oslo_concurrency.lockutils [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2091.956259] env[62684]: DEBUG nova.network.neutron [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Updating instance_info_cache with network_info: [{"id": "da310d7c-cd12-49ca-8014-efa9469aef45", "address": "fa:16:3e:5a:6f:ea", "network": {"id": "64494ea7-f6d9-430c-8ac7-e876e763004b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2056829508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e57b232a7e7647c7a3b2bca3c096feb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda310d7c-cd", "ovs_interfaceid": "da310d7c-cd12-49ca-8014-efa9469aef45", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2092.099419] env[62684]: DEBUG oslo_concurrency.lockutils [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquiring lock "9964237b-db9b-49cc-a9bd-d62329ea564e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2092.099754] env[62684]: DEBUG oslo_concurrency.lockutils [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Lock "9964237b-db9b-49cc-a9bd-d62329ea564e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2092.100016] env[62684]: DEBUG oslo_concurrency.lockutils [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquiring lock "9964237b-db9b-49cc-a9bd-d62329ea564e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2092.100252] env[62684]: DEBUG oslo_concurrency.lockutils [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Lock "9964237b-db9b-49cc-a9bd-d62329ea564e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2092.100436] env[62684]: DEBUG oslo_concurrency.lockutils [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Lock "9964237b-db9b-49cc-a9bd-d62329ea564e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2092.106073] env[62684]: INFO nova.compute.manager [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Terminating instance [ 2092.109703] env[62684]: DEBUG oslo_concurrency.lockutils [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquiring lock "refresh_cache-9964237b-db9b-49cc-a9bd-d62329ea564e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2092.111325] env[62684]: DEBUG oslo_concurrency.lockutils [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquired lock "refresh_cache-9964237b-db9b-49cc-a9bd-d62329ea564e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2092.111589] env[62684]: DEBUG nova.network.neutron [None 
req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2092.146583] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 28886f7c-6efc-4505-84f6-682d75cea215 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2092.147218] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 14 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2092.147218] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3200MB phys_disk=200GB used_disk=14GB total_vcpus=48 used_vcpus=14 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2092.227315] env[62684]: DEBUG oslo_vmware.api [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523f89fa-5876-9911-6fe9-726018d5ee6b, 'name': SearchDatastore_Task, 'duration_secs': 0.010485} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2092.227945] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2092.228154] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 23578214-6708-43ae-88ce-56212083532a/23578214-6708-43ae-88ce-56212083532a.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2092.228412] env[62684]: DEBUG oslo_concurrency.lockutils [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2092.228638] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2092.229033] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ca491694-10da-4f92-8c6f-12dc57ef0878 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.232369] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2800571-18dc-4bf6-b813-74ddea287511 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.248291] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2092.248518] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2092.249376] env[62684]: DEBUG oslo_vmware.api [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2092.249376] env[62684]: value = "task-2053261" [ 2092.249376] env[62684]: _type = "Task" [ 2092.249376] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.249661] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76081a80-edf3-4b82-a8cb-4322f4f6e1aa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.259288] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Waiting for the task: (returnval){ [ 2092.259288] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5269b977-1a56-ea81-c5e4-98f07e5591d9" [ 2092.259288] env[62684]: _type = "Task" [ 2092.259288] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.265949] env[62684]: DEBUG oslo_vmware.api [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053261, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.281584] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5269b977-1a56-ea81-c5e4-98f07e5591d9, 'name': SearchDatastore_Task, 'duration_secs': 0.009098} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2092.285253] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a09d2f9f-ecff-4fdd-83b5-8fb2fc0ee4ce {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.291881] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Waiting for the task: (returnval){ [ 2092.291881] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e11170-130e-19cb-e0ab-ba588f6d0894" [ 2092.291881] env[62684]: _type = "Task" [ 2092.291881] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.301231] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e11170-130e-19cb-e0ab-ba588f6d0894, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.332763] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "8cc68353-4678-4ee7-8c0d-3d71e6bf05bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2092.333086] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "8cc68353-4678-4ee7-8c0d-3d71e6bf05bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2092.456373] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa66073-4d70-4cf5-8633-29675fc5b1ea {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.461977] env[62684]: DEBUG oslo_concurrency.lockutils [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Releasing lock "refresh_cache-daf1486b-d5c2-4341-8a27-36eeeb08cd26" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2092.462300] env[62684]: DEBUG nova.compute.manager [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Instance network_info: |[{"id": "da310d7c-cd12-49ca-8014-efa9469aef45", "address": "fa:16:3e:5a:6f:ea", "network": {"id": "64494ea7-f6d9-430c-8ac7-e876e763004b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2056829508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e57b232a7e7647c7a3b2bca3c096feb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda310d7c-cd", "ovs_interfaceid": "da310d7c-cd12-49ca-8014-efa9469aef45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2092.462838] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Instance VIF info [{'network_name': 
'br-int', 'mac_address': 'fa:16:3e:5a:6f:ea', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6966f473-59ac-49bb-9b7a-22c61f4e61e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'da310d7c-cd12-49ca-8014-efa9469aef45', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2092.475150] env[62684]: DEBUG oslo.service.loopingcall [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2092.476434] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2092.480905] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b98901e2-d2cf-4089-a8ff-a70b132d888e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.498899] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d2f646f-0ad5-4f33-ba98-3027faddfa22 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.538810] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c5416d-8490-40fd-ad34-7aa5b9234fff {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.541742] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2092.541742] env[62684]: value = "task-2053262" [ 2092.541742] env[62684]: _type = "Task" [ 2092.541742] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.549241] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f2fcf97-35b2-4684-98c2-ffc0fe7397a6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.559081] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053262, 'name': CreateVM_Task} progress is 15%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.569716] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2092.637728] env[62684]: DEBUG nova.network.neutron [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2092.712836] env[62684]: DEBUG nova.network.neutron [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2092.764913] env[62684]: DEBUG oslo_vmware.api [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053261, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.461208} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2092.764913] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 23578214-6708-43ae-88ce-56212083532a/23578214-6708-43ae-88ce-56212083532a.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2092.764913] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2092.764913] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9fb882b0-6e6c-48b8-a274-d24dd35a2c1f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.773429] env[62684]: DEBUG oslo_vmware.api [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2092.773429] env[62684]: value = "task-2053263" [ 2092.773429] env[62684]: _type = "Task" [ 2092.773429] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.780962] env[62684]: DEBUG oslo_vmware.api [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053263, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.801873] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e11170-130e-19cb-e0ab-ba588f6d0894, 'name': SearchDatastore_Task, 'duration_secs': 0.008662} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2092.802185] env[62684]: DEBUG oslo_concurrency.lockutils [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2092.802529] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 4cf48f05-d643-47e6-9a0b-33415d80890c/4cf48f05-d643-47e6-9a0b-33415d80890c.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2092.802834] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-26bdb8f6-22d0-45de-a429-5ea67c47a167 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.809765] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Waiting for the task: (returnval){ [ 2092.809765] env[62684]: value = "task-2053264" [ 2092.809765] env[62684]: _type = "Task" [ 2092.809765] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.817295] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053264, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.838576] env[62684]: DEBUG nova.compute.manager [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2092.974189] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquiring lock "e8c90faa-2c25-4308-9781-80d308b9211c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2092.974189] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "e8c90faa-2c25-4308-9781-80d308b9211c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2093.051981] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053262, 'name': CreateVM_Task, 'duration_secs': 0.457768} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2093.052194] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2093.052927] env[62684]: DEBUG oslo_concurrency.lockutils [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2093.053170] env[62684]: DEBUG oslo_concurrency.lockutils [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2093.053468] env[62684]: DEBUG oslo_concurrency.lockutils [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2093.054397] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23396c87-bc2d-4848-8fdd-835a697f8d7f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.060157] env[62684]: DEBUG oslo_vmware.api [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2093.060157] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52cbcaf0-12eb-ca43-4e57-5b9d9420d417" [ 2093.060157] env[62684]: _type = "Task" [ 2093.060157] env[62684]: } to complete. 
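
Editor's note: the paired "Acquiring lock"/"Lock ... acquired ... waited 0.000s" entries above (keyed by instance UUID or by a cached-image path) record per-resource serialization via oslo.concurrency. A minimal sketch of that pattern, assuming oslo.concurrency is available and using its `lockutils.lock()` context manager; this is an illustration, not the compute manager's actual wrapper:

    from oslo_concurrency import lockutils

    def locked_do_build(instance_uuid, do_build):
        """Serialize work per instance: a second request for the same UUID
        blocks here ("waited Ns" in the log) instead of racing."""
        with lockutils.lock(instance_uuid):
            return do_build(instance_uuid)

The matching "Releasing lock"/"released ... held Ns" lines elsewhere in the log correspond to leaving such a context manager.
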
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2093.069188] env[62684]: DEBUG oslo_vmware.api [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52cbcaf0-12eb-ca43-4e57-5b9d9420d417, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.099024] env[62684]: ERROR nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [req-fad8a20e-a78b-4e0d-b7cd-cdb9798260be] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-fad8a20e-a78b-4e0d-b7cd-cdb9798260be"}]} [ 2093.126820] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2093.146093] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2093.146332] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2093.162578] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2093.185247] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Refreshing trait associations for resource 
provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2093.216810] env[62684]: DEBUG oslo_concurrency.lockutils [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Releasing lock "refresh_cache-9964237b-db9b-49cc-a9bd-d62329ea564e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2093.219219] env[62684]: DEBUG nova.compute.manager [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2093.219219] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2093.220344] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47454730-c421-4a52-a3cf-e0f2f817e593 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.236306] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2093.236654] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cc401396-86c6-4455-b93d-6f123034de57 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.249112] env[62684]: DEBUG oslo_vmware.api [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2093.249112] env[62684]: value = "task-2053265" [ 2093.249112] env[62684]: _type = "Task" [ 2093.249112] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2093.259377] env[62684]: DEBUG oslo_vmware.api [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053265, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.284092] env[62684]: DEBUG oslo_vmware.api [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053263, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060385} completed successfully. 
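
Editor's note: the 409 "placement.concurrent_update" error above, followed immediately by "Refreshing inventories", is the standard generation-conflict cycle: the provider's generation changed between read and write, so the writer re-reads and retries. The sketch below shows that general pattern against the Placement API; `session` is assumed to be a requests-style session already carrying auth and microversion headers, and this is not Nova's scheduler report client.

    def put_inventories(session, placement_url, provider_uuid, inventories, attempts=4):
        for _ in range(attempts):
            # Re-read the provider to get its current generation.
            rp = session.get('%s/resource_providers/%s'
                             % (placement_url, provider_uuid)).json()
            body = {
                'resource_provider_generation': rp['generation'],
                'inventories': inventories,
            }
            resp = session.put('%s/resource_providers/%s/inventories'
                               % (placement_url, provider_uuid), json=body)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 generation conflict: another writer updated the provider
            # first; loop, refresh the generation, and try again.
        raise RuntimeError('gave up updating inventory for %s after %d attempts'
                           % (provider_uuid, attempts))
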
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2093.284092] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2093.284877] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-534a7f7c-fabf-47eb-b307-6e883ee54821 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.310199] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] 23578214-6708-43ae-88ce-56212083532a/23578214-6708-43ae-88ce-56212083532a.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2093.314214] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-92b1861a-7d6e-4ac7-acc7-176f4fcec90a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.344949] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053264, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.472131} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2093.349711] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 4cf48f05-d643-47e6-9a0b-33415d80890c/4cf48f05-d643-47e6-9a0b-33415d80890c.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2093.349897] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2093.350427] env[62684]: DEBUG oslo_vmware.api [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2093.350427] env[62684]: value = "task-2053266" [ 2093.350427] env[62684]: _type = "Task" [ 2093.350427] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2093.354104] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-37da200c-356f-40ef-aad4-87aeb163178a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.368341] env[62684]: DEBUG oslo_vmware.api [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053266, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.370196] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2093.371026] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Waiting for the task: (returnval){ [ 2093.371026] env[62684]: value = "task-2053267" [ 2093.371026] env[62684]: _type = "Task" [ 2093.371026] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2093.383243] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053267, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.478531] env[62684]: DEBUG nova.compute.manager [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2093.568858] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5823fb00-664d-4620-85b6-0659b9af1f81 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.579203] env[62684]: DEBUG oslo_vmware.api [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52cbcaf0-12eb-ca43-4e57-5b9d9420d417, 'name': SearchDatastore_Task, 'duration_secs': 0.022289} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2093.580215] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51b2c0ba-9778-415b-8178-1acf8f1e6527 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.584332] env[62684]: DEBUG oslo_concurrency.lockutils [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2093.584457] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2093.584738] env[62684]: DEBUG oslo_concurrency.lockutils [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2093.585041] env[62684]: DEBUG oslo_concurrency.lockutils [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2093.585237] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2093.585578] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3583308a-e048-4b15-93f7-0ca33547ed22 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.620127] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ee2db98-ce73-42e5-bdc4-1dc65b01ec21 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.622934] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2093.623100] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 
tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2093.623740] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb67d54f-e3ac-4a15-993e-b615a64862b5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.635706] env[62684]: DEBUG oslo_vmware.api [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2093.635706] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52936828-bc90-c277-e9ec-966fd8c60ddc" [ 2093.635706] env[62684]: _type = "Task" [ 2093.635706] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2093.639816] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-714deceb-3d4f-4ddd-9036-5d86fd5625d7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.657892] env[62684]: DEBUG oslo_vmware.api [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52936828-bc90-c277-e9ec-966fd8c60ddc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.658797] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2093.759630] env[62684]: DEBUG oslo_vmware.api [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053265, 'name': PowerOffVM_Task, 'duration_secs': 0.19221} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2093.759630] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2093.759859] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2093.760098] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-772043e7-c770-4d82-9073-3a18d5f1a615 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.789990] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2093.789990] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2093.789990] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Deleting the datastore file [datastore2] 9964237b-db9b-49cc-a9bd-d62329ea564e {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2093.789990] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f03cca35-752f-4c31-8993-598deb2cdb66 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.796085] env[62684]: DEBUG oslo_vmware.api [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for the task: (returnval){ [ 2093.796085] env[62684]: value = "task-2053269" [ 2093.796085] env[62684]: _type = "Task" [ 2093.796085] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2093.806341] env[62684]: DEBUG oslo_vmware.api [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053269, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.833218] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "interface-81b7949d-be24-46c9-8dc8-c249b65bb039-5f8a8ef1-7f32-48b8-9444-b5fc1ab25e68" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2093.833737] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "interface-81b7949d-be24-46c9-8dc8-c249b65bb039-5f8a8ef1-7f32-48b8-9444-b5fc1ab25e68" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2093.833940] env[62684]: DEBUG nova.objects.instance [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lazy-loading 'flavor' on Instance uuid 81b7949d-be24-46c9-8dc8-c249b65bb039 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2093.868827] env[62684]: DEBUG oslo_vmware.api [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053266, 'name': ReconfigVM_Task, 'duration_secs': 0.304158} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2093.869154] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 23578214-6708-43ae-88ce-56212083532a/23578214-6708-43ae-88ce-56212083532a.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2093.869826] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3569184a-3593-4432-b0ec-44e00f775219 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.881659] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053267, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071713} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2093.884518] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2093.884518] env[62684]: DEBUG oslo_vmware.api [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2093.884518] env[62684]: value = "task-2053270" [ 2093.884518] env[62684]: _type = "Task" [ 2093.884518] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2093.884518] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4448177-e3f5-4899-abeb-2687c23ed64e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.913762] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] 4cf48f05-d643-47e6-9a0b-33415d80890c/4cf48f05-d643-47e6-9a0b-33415d80890c.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2093.914546] env[62684]: DEBUG oslo_vmware.api [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053270, 'name': Rename_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.914793] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-052175c1-e94b-46f1-a078-fdfd0691d02b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.937042] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Waiting for the task: (returnval){ [ 2093.937042] env[62684]: value = "task-2053271" [ 2093.937042] env[62684]: _type = "Task" [ 2093.937042] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2093.945868] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053271, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.957824] env[62684]: DEBUG oslo_concurrency.lockutils [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquiring lock "3a967adf-8c46-4787-b1d1-4ed701399576" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2093.957824] env[62684]: DEBUG oslo_concurrency.lockutils [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Lock "3a967adf-8c46-4787-b1d1-4ed701399576" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2093.960188] env[62684]: DEBUG nova.compute.manager [req-c4150fd0-9c17-4796-9ead-13479b9bd924 req-400ebe65-5860-4afc-b3fa-4b2ce76e02ff service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Received event network-changed-da310d7c-cd12-49ca-8014-efa9469aef45 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2093.960403] env[62684]: DEBUG nova.compute.manager [req-c4150fd0-9c17-4796-9ead-13479b9bd924 req-400ebe65-5860-4afc-b3fa-4b2ce76e02ff service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Refreshing instance network info cache due to event network-changed-da310d7c-cd12-49ca-8014-efa9469aef45. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2093.960629] env[62684]: DEBUG oslo_concurrency.lockutils [req-c4150fd0-9c17-4796-9ead-13479b9bd924 req-400ebe65-5860-4afc-b3fa-4b2ce76e02ff service nova] Acquiring lock "refresh_cache-daf1486b-d5c2-4341-8a27-36eeeb08cd26" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2093.960870] env[62684]: DEBUG oslo_concurrency.lockutils [req-c4150fd0-9c17-4796-9ead-13479b9bd924 req-400ebe65-5860-4afc-b3fa-4b2ce76e02ff service nova] Acquired lock "refresh_cache-daf1486b-d5c2-4341-8a27-36eeeb08cd26" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2093.962230] env[62684]: DEBUG nova.network.neutron [req-c4150fd0-9c17-4796-9ead-13479b9bd924 req-400ebe65-5860-4afc-b3fa-4b2ce76e02ff service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Refreshing network info cache for port da310d7c-cd12-49ca-8014-efa9469aef45 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2094.003785] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2094.148381] env[62684]: DEBUG oslo_vmware.api [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52936828-bc90-c277-e9ec-966fd8c60ddc, 'name': 
SearchDatastore_Task, 'duration_secs': 0.03582} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2094.149315] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-580433ed-7a4b-47d6-a3a0-56509133ba30 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.155280] env[62684]: DEBUG oslo_vmware.api [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2094.155280] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bd2dbc-e979-89cb-3905-04bc3d246fb6" [ 2094.155280] env[62684]: _type = "Task" [ 2094.155280] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2094.167770] env[62684]: DEBUG oslo_vmware.api [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bd2dbc-e979-89cb-3905-04bc3d246fb6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2094.190214] env[62684]: ERROR nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [req-944aad97-be2a-46de-b293-fbcda4995f9d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-944aad97-be2a-46de-b293-fbcda4995f9d"}]} [ 2094.209575] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2094.226877] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2094.227108] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2094.240971] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2094.261124] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2094.309606] env[62684]: DEBUG oslo_vmware.api [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Task: {'id': task-2053269, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109707} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2094.309606] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2094.309606] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2094.309606] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2094.309606] env[62684]: INFO nova.compute.manager [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Took 1.09 seconds to destroy the instance on the hypervisor. [ 2094.309871] env[62684]: DEBUG oslo.service.loopingcall [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2094.309871] env[62684]: DEBUG nova.compute.manager [-] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2094.309967] env[62684]: DEBUG nova.network.neutron [-] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2094.325691] env[62684]: DEBUG nova.network.neutron [-] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2094.397773] env[62684]: DEBUG oslo_vmware.api [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053270, 'name': Rename_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2094.448667] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053271, 'name': ReconfigVM_Task, 'duration_secs': 0.27119} completed successfully. 
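
Editor's note: the destroy path for instance 9964237b above proceeds strictly in order: power the VM off, unregister it, delete its datastore files, then deallocate networking (the last step wrapped in a retrying looping call). A condensed illustration of that ordering follows; the arguments are placeholder callables, not the vmwareapi driver's methods.

    import time

    def destroy_instance(power_off, unregister, delete_files, deallocate_network,
                         retries=3, retry_interval=2):
        """Run teardown in the order the log records."""
        power_off()            # PowerOffVM_Task
        unregister()           # VirtualMachine.UnregisterVM
        delete_files()         # FileManager.DeleteDatastoreFile_Task
        # Network deallocation is retried, mirroring the
        # _deallocate_network_with_retries looping call above.
        for attempt in range(1, retries + 1):
            try:
                deallocate_network()
                return
            except Exception:
                if attempt == retries:
                    raise
                time.sleep(retry_interval)
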
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2094.448948] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Reconfigured VM instance instance-0000004f to attach disk [datastore1] 4cf48f05-d643-47e6-9a0b-33415d80890c/4cf48f05-d643-47e6-9a0b-33415d80890c.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2094.449572] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-be88363b-5b35-44b2-8136-b52dcef54a53 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.456931] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Waiting for the task: (returnval){ [ 2094.456931] env[62684]: value = "task-2053272" [ 2094.456931] env[62684]: _type = "Task" [ 2094.456931] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2094.460214] env[62684]: DEBUG nova.objects.instance [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lazy-loading 'pci_requests' on Instance uuid 81b7949d-be24-46c9-8dc8-c249b65bb039 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2094.461368] env[62684]: DEBUG nova.compute.manager [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2094.472142] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053272, 'name': Rename_Task} progress is 10%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2094.539631] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d0f53f2-483a-42a2-84ca-1975dc279947 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.550250] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735ad239-42da-4ba6-9133-cd262f9c6475 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.585888] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-466aee3b-5e46-44e5-8dd3-313bbe975fee {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.593668] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd26504-c8c8-4471-acea-fbbdabf7b36f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.608754] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2094.666811] env[62684]: DEBUG oslo_vmware.api [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bd2dbc-e979-89cb-3905-04bc3d246fb6, 'name': SearchDatastore_Task, 'duration_secs': 0.009972} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2094.667239] env[62684]: DEBUG oslo_concurrency.lockutils [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2094.667572] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] daf1486b-d5c2-4341-8a27-36eeeb08cd26/daf1486b-d5c2-4341-8a27-36eeeb08cd26.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2094.667891] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-98c18545-5bee-4ee7-9653-916aab779298 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.674448] env[62684]: DEBUG oslo_vmware.api [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2094.674448] env[62684]: value = "task-2053273" [ 2094.674448] env[62684]: _type = "Task" [ 2094.674448] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2094.686784] env[62684]: DEBUG oslo_vmware.api [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053273, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2094.721827] env[62684]: DEBUG nova.network.neutron [req-c4150fd0-9c17-4796-9ead-13479b9bd924 req-400ebe65-5860-4afc-b3fa-4b2ce76e02ff service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Updated VIF entry in instance network info cache for port da310d7c-cd12-49ca-8014-efa9469aef45. 
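
Editor's note: the image-cache handling above follows a copy-on-cache-hit flow: take a lock named after the cached VMDK, search the datastore for it (creating devstack-image-cache_base if missing), then copy the cached disk into the instance directory once the lock is released. A local-filesystem analogue of that flow, for illustration only (paths and the `fetch_image` callable are hypothetical, and this is not Nova's `_fetch_image_if_missing`):

    import os
    import shutil
    from oslo_concurrency import lockutils

    def disk_from_cache(image_id, cache_dir, instance_dir, fetch_image):
        cached = os.path.join(cache_dir, image_id, image_id + '.vmdk')
        with lockutils.lock(cached):                  # serialize per cached image
            os.makedirs(os.path.dirname(cached), exist_ok=True)
            if not os.path.exists(cached):            # the "SearchDatastore" step
                fetch_image(image_id, cached)         # only on a cache miss
        target = os.path.join(instance_dir, image_id + '.vmdk')
        shutil.copy(cached, target)                   # the "CopyVirtualDisk" step
        return target

Keeping the lock scoped to the cache entry (not the copy into the instance directory) is what lets several builds reuse the same cached image concurrently, as the log shows for image 3931321c.
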
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2094.722381] env[62684]: DEBUG nova.network.neutron [req-c4150fd0-9c17-4796-9ead-13479b9bd924 req-400ebe65-5860-4afc-b3fa-4b2ce76e02ff service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Updating instance_info_cache with network_info: [{"id": "da310d7c-cd12-49ca-8014-efa9469aef45", "address": "fa:16:3e:5a:6f:ea", "network": {"id": "64494ea7-f6d9-430c-8ac7-e876e763004b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2056829508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e57b232a7e7647c7a3b2bca3c096feb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda310d7c-cd", "ovs_interfaceid": "da310d7c-cd12-49ca-8014-efa9469aef45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2094.828358] env[62684]: DEBUG nova.network.neutron [-] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2094.898874] env[62684]: DEBUG oslo_vmware.api [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053270, 'name': Rename_Task, 'duration_secs': 0.833853} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2094.899215] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2094.899488] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-38453894-b804-4f6a-a724-1c705ebe38bb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.906551] env[62684]: DEBUG oslo_vmware.api [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2094.906551] env[62684]: value = "task-2053274" [ 2094.906551] env[62684]: _type = "Task" [ 2094.906551] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2094.915013] env[62684]: DEBUG oslo_vmware.api [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053274, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2094.967733] env[62684]: DEBUG nova.objects.base [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Object Instance<81b7949d-be24-46c9-8dc8-c249b65bb039> lazy-loaded attributes: flavor,pci_requests {{(pid=62684) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2094.967872] env[62684]: DEBUG nova.network.neutron [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2094.970239] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053272, 'name': Rename_Task, 'duration_secs': 0.149067} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2094.973062] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2094.973693] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-642e1e76-f950-4ece-b612-14dc9fb20593 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.984598] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Waiting for the task: (returnval){ [ 2094.984598] env[62684]: value = "task-2053275" [ 2094.984598] env[62684]: _type = "Task" [ 2094.984598] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2094.991444] env[62684]: DEBUG oslo_concurrency.lockutils [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2094.998490] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053275, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.059557] env[62684]: DEBUG nova.policy [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e957449ae9d24bdaba38b3db704d3d61', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5cb4900a999e467bafdfd1fb407a82f4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2095.133117] env[62684]: ERROR nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [req-1cce3edd-fe22-480a-902f-d3fb494972f0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1cce3edd-fe22-480a-902f-d3fb494972f0"}]} [ 2095.160334] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2095.182758] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2095.183383] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2095.190279] env[62684]: DEBUG oslo_vmware.api [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 
tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053273, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.463105} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.191171] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] daf1486b-d5c2-4341-8a27-36eeeb08cd26/daf1486b-d5c2-4341-8a27-36eeeb08cd26.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2095.191593] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2095.191593] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b337cce2-b5f2-4a19-9745-e1e9bc71d44e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.199378] env[62684]: DEBUG oslo_vmware.api [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2095.199378] env[62684]: value = "task-2053276" [ 2095.199378] env[62684]: _type = "Task" [ 2095.199378] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2095.206593] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2095.218424] env[62684]: DEBUG oslo_vmware.api [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053276, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.225425] env[62684]: DEBUG oslo_concurrency.lockutils [req-c4150fd0-9c17-4796-9ead-13479b9bd924 req-400ebe65-5860-4afc-b3fa-4b2ce76e02ff service nova] Releasing lock "refresh_cache-daf1486b-d5c2-4341-8a27-36eeeb08cd26" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2095.235726] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2095.245630] env[62684]: DEBUG oslo_concurrency.lockutils [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Acquiring lock "2eab4a07-9b92-436e-b4f8-fa64ae949b56" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2095.247939] env[62684]: DEBUG oslo_concurrency.lockutils [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Lock "2eab4a07-9b92-436e-b4f8-fa64ae949b56" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2095.330733] env[62684]: INFO nova.compute.manager [-] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Took 1.02 seconds to deallocate network for instance. [ 2095.420417] env[62684]: DEBUG oslo_vmware.api [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053274, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.496615] env[62684]: DEBUG oslo_vmware.api [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053275, 'name': PowerOnVM_Task, 'duration_secs': 0.482786} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.497734] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2095.497734] env[62684]: DEBUG nova.compute.manager [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2095.497956] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00a4b48a-138a-4f0d-ba04-b0de28052eba {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.529688] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8742f2d9-fa80-43bd-abd3-6bd782e8d5f4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.538109] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-728adbbe-816c-455b-8911-f4ec609e0b92 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.568516] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-857ae19b-88de-4c1d-86b8-692fdd63dfad {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.577122] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdbbc05f-9fd1-42b4-b618-0ae63e212dfb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.593067] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2095.709701] env[62684]: DEBUG oslo_vmware.api [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053276, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069272} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.710013] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2095.710795] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20f0a453-ac18-4266-8f11-f30c9fcdd63d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.733209] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] daf1486b-d5c2-4341-8a27-36eeeb08cd26/daf1486b-d5c2-4341-8a27-36eeeb08cd26.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2095.734078] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2d6d514a-3042-4cb4-b2df-f2626ff36658 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.748141] env[62684]: DEBUG nova.compute.manager [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2095.756714] env[62684]: DEBUG oslo_vmware.api [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2095.756714] env[62684]: value = "task-2053277" [ 2095.756714] env[62684]: _type = "Task" [ 2095.756714] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2095.764423] env[62684]: DEBUG oslo_vmware.api [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053277, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.837498] env[62684]: DEBUG oslo_concurrency.lockutils [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2095.918731] env[62684]: DEBUG oslo_vmware.api [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053274, 'name': PowerOnVM_Task, 'duration_secs': 0.524787} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.919079] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2095.919299] env[62684]: INFO nova.compute.manager [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Took 17.68 seconds to spawn the instance on the hypervisor. [ 2095.919489] env[62684]: DEBUG nova.compute.manager [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2095.920315] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a86e9a-697a-4dda-9b5d-1848a524f70a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.954044] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eedf3b9d-16d2-4e27-882c-51007c79a95a tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "57537508-06e7-43a4-95c5-c4399b8bf93f" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2095.954044] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eedf3b9d-16d2-4e27-882c-51007c79a95a tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "57537508-06e7-43a4-95c5-c4399b8bf93f" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2095.954044] env[62684]: DEBUG nova.compute.manager [None req-eedf3b9d-16d2-4e27-882c-51007c79a95a tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2095.954044] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a0ae062-7f80-4d18-8dcc-0e06ac4df17e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.960205] env[62684]: DEBUG nova.compute.manager [None req-eedf3b9d-16d2-4e27-882c-51007c79a95a tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62684) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 2095.960989] env[62684]: DEBUG nova.objects.instance [None req-eedf3b9d-16d2-4e27-882c-51007c79a95a tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lazy-loading 'flavor' on Instance uuid 
57537508-06e7-43a4-95c5-c4399b8bf93f {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2096.019150] env[62684]: DEBUG oslo_concurrency.lockutils [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2096.127532] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 119 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2096.127532] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 119 to 120 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2096.127792] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2096.273036] env[62684]: DEBUG oslo_vmware.api [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053277, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2096.276850] env[62684]: DEBUG oslo_concurrency.lockutils [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2096.440460] env[62684]: INFO nova.compute.manager [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Took 26.66 seconds to build instance. 
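The records in this stretch repeat the same oslo.vmware task lifecycle: a *_Task method is invoked (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), wait_for_task logs "Waiting for the task", _poll_task emits intermediate "progress is N%" lines, and the final "completed successfully" line carries duration_secs. As a rough, self-contained illustration of that poll loop only — not the oslo.vmware implementation, whose session plumbing and retry handling are more involved — the sketch below shows the shape of it; TaskFailed and the get_task_info callable are hypothetical stand-ins for TaskInfo retrieval through the vSphere API.

import time

class TaskFailed(Exception):
    """Hypothetical error type for a task that ends in the 'error' state."""

def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    """Poll a task until it reaches 'success' or 'error'.

    get_task_info is a caller-supplied callable returning a dict with a
    'state' of 'running', 'success' or 'error' and an integer 'progress';
    this mirrors the shape of the vCenter TaskInfo echoed in the records
    above ("progress is N%", "completed successfully").
    """
    deadline = time.monotonic() + timeout
    while True:
        info = get_task_info()
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise TaskFailed(info.get('error', 'task failed'))
        # intermediate polls correspond to the "_poll_task ... progress is N%" lines
        print("progress is %d%%" % info.get('progress', 0))
        if time.monotonic() > deadline:
            raise TimeoutError('task did not complete in time')
        time.sleep(poll_interval)

# usage sketch: a fake task that reports success on its third poll
_states = iter([{'state': 'running', 'progress': 0},
                {'state': 'running', 'progress': 50},
                {'state': 'success', 'progress': 100}])
print(wait_for_task(lambda: next(_states), poll_interval=0.01))

In the driver itself this pattern is what produces the api.py:397 ("Waiting for the task"), api.py:434 ("progress is N%"), and api.py:444 ("completed successfully") lines quoted throughout this section.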
[ 2096.465603] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-eedf3b9d-16d2-4e27-882c-51007c79a95a tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2096.466150] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-df01c90c-ba42-4a7b-9525-3d603b486f92 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.474409] env[62684]: DEBUG oslo_vmware.api [None req-eedf3b9d-16d2-4e27-882c-51007c79a95a tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2096.474409] env[62684]: value = "task-2053278" [ 2096.474409] env[62684]: _type = "Task" [ 2096.474409] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2096.483370] env[62684]: DEBUG oslo_vmware.api [None req-eedf3b9d-16d2-4e27-882c-51007c79a95a tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053278, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2096.635226] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2096.635513] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 6.048s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2096.635804] env[62684]: DEBUG oslo_concurrency.lockutils [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.032s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2096.637743] env[62684]: INFO nova.compute.claims [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2096.640409] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2096.644015] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Cleaning up deleted instances {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 2096.663610] env[62684]: DEBUG nova.network.neutron [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 
tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Successfully updated port: 5f8a8ef1-7f32-48b8-9444-b5fc1ab25e68 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2096.745149] env[62684]: DEBUG nova.compute.manager [req-ea4e84ce-527a-4eb1-a278-3a3e50f0d162 req-49e5b3d8-1d36-445c-af0b-d87e9f6bf795 service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Received event network-vif-plugged-5f8a8ef1-7f32-48b8-9444-b5fc1ab25e68 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2096.745339] env[62684]: DEBUG oslo_concurrency.lockutils [req-ea4e84ce-527a-4eb1-a278-3a3e50f0d162 req-49e5b3d8-1d36-445c-af0b-d87e9f6bf795 service nova] Acquiring lock "81b7949d-be24-46c9-8dc8-c249b65bb039-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2096.745618] env[62684]: DEBUG oslo_concurrency.lockutils [req-ea4e84ce-527a-4eb1-a278-3a3e50f0d162 req-49e5b3d8-1d36-445c-af0b-d87e9f6bf795 service nova] Lock "81b7949d-be24-46c9-8dc8-c249b65bb039-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2096.745796] env[62684]: DEBUG oslo_concurrency.lockutils [req-ea4e84ce-527a-4eb1-a278-3a3e50f0d162 req-49e5b3d8-1d36-445c-af0b-d87e9f6bf795 service nova] Lock "81b7949d-be24-46c9-8dc8-c249b65bb039-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2096.745980] env[62684]: DEBUG nova.compute.manager [req-ea4e84ce-527a-4eb1-a278-3a3e50f0d162 req-49e5b3d8-1d36-445c-af0b-d87e9f6bf795 service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] No waiting events found dispatching network-vif-plugged-5f8a8ef1-7f32-48b8-9444-b5fc1ab25e68 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2096.746164] env[62684]: WARNING nova.compute.manager [req-ea4e84ce-527a-4eb1-a278-3a3e50f0d162 req-49e5b3d8-1d36-445c-af0b-d87e9f6bf795 service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Received unexpected event network-vif-plugged-5f8a8ef1-7f32-48b8-9444-b5fc1ab25e68 for instance with vm_state active and task_state None. [ 2096.768405] env[62684]: DEBUG oslo_vmware.api [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053277, 'name': ReconfigVM_Task, 'duration_secs': 0.911794} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2096.768719] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Reconfigured VM instance instance-00000050 to attach disk [datastore2] daf1486b-d5c2-4341-8a27-36eeeb08cd26/daf1486b-d5c2-4341-8a27-36eeeb08cd26.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2096.769601] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0417d950-a84e-4c16-9322-eb8c3d4fc09e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.777071] env[62684]: DEBUG oslo_vmware.api [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2096.777071] env[62684]: value = "task-2053279" [ 2096.777071] env[62684]: _type = "Task" [ 2096.777071] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2096.784738] env[62684]: DEBUG oslo_vmware.api [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053279, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2096.836399] env[62684]: DEBUG oslo_concurrency.lockutils [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Acquiring lock "4cf48f05-d643-47e6-9a0b-33415d80890c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2096.836698] env[62684]: DEBUG oslo_concurrency.lockutils [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Lock "4cf48f05-d643-47e6-9a0b-33415d80890c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2096.836933] env[62684]: DEBUG oslo_concurrency.lockutils [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Acquiring lock "4cf48f05-d643-47e6-9a0b-33415d80890c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2096.837145] env[62684]: DEBUG oslo_concurrency.lockutils [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Lock "4cf48f05-d643-47e6-9a0b-33415d80890c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
2096.837322] env[62684]: DEBUG oslo_concurrency.lockutils [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Lock "4cf48f05-d643-47e6-9a0b-33415d80890c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2096.840984] env[62684]: INFO nova.compute.manager [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Terminating instance [ 2096.842918] env[62684]: DEBUG oslo_concurrency.lockutils [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Acquiring lock "refresh_cache-4cf48f05-d643-47e6-9a0b-33415d80890c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2096.843117] env[62684]: DEBUG oslo_concurrency.lockutils [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Acquired lock "refresh_cache-4cf48f05-d643-47e6-9a0b-33415d80890c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2096.843297] env[62684]: DEBUG nova.network.neutron [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2096.940327] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7c65d57c-0ebf-4954-b707-ed5ea9e8a939 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "23578214-6708-43ae-88ce-56212083532a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.169s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2096.985494] env[62684]: DEBUG oslo_vmware.api [None req-eedf3b9d-16d2-4e27-882c-51007c79a95a tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053278, 'name': PowerOffVM_Task, 'duration_secs': 0.458066} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2096.985863] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-eedf3b9d-16d2-4e27-882c-51007c79a95a tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2096.986267] env[62684]: DEBUG nova.compute.manager [None req-eedf3b9d-16d2-4e27-882c-51007c79a95a tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2096.987158] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-791d64da-8ac5-447c-ad83-f608f7db235a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.156509] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] There are 56 instances to clean {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 2097.156802] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 2aac4230-2070-48be-b91a-5cb4218a0574] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2097.166395] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "refresh_cache-81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2097.171021] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "refresh_cache-81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2097.171021] env[62684]: DEBUG nova.network.neutron [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2097.288605] env[62684]: DEBUG oslo_vmware.api [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053279, 'name': Rename_Task, 'duration_secs': 0.2982} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2097.288605] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2097.288605] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1810b5cf-7720-4b97-ab0b-97d1b061fc97 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.295141] env[62684]: DEBUG oslo_vmware.api [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2097.295141] env[62684]: value = "task-2053280" [ 2097.295141] env[62684]: _type = "Task" [ 2097.295141] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2097.302969] env[62684]: DEBUG oslo_vmware.api [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053280, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.365050] env[62684]: DEBUG nova.network.neutron [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2097.423273] env[62684]: DEBUG nova.network.neutron [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2097.499097] env[62684]: DEBUG oslo_concurrency.lockutils [None req-eedf3b9d-16d2-4e27-882c-51007c79a95a tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "57537508-06e7-43a4-95c5-c4399b8bf93f" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.548s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2097.665340] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 2f8f7e02-54fb-4275-badb-35c0b840ab33] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2097.717085] env[62684]: WARNING nova.network.neutron [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] bbb78a3c-6804-4aae-9107-4ae6699c305d already exists in list: networks containing: ['bbb78a3c-6804-4aae-9107-4ae6699c305d']. 
ignoring it [ 2097.717357] env[62684]: WARNING nova.network.neutron [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] bbb78a3c-6804-4aae-9107-4ae6699c305d already exists in list: networks containing: ['bbb78a3c-6804-4aae-9107-4ae6699c305d']. ignoring it [ 2097.804563] env[62684]: DEBUG oslo_vmware.api [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053280, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.926755] env[62684]: DEBUG oslo_concurrency.lockutils [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Releasing lock "refresh_cache-4cf48f05-d643-47e6-9a0b-33415d80890c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2097.927384] env[62684]: DEBUG nova.compute.manager [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2097.927683] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2097.929821] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51b2b06e-8d91-43a1-a3eb-2c2dd6bdfb0e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.940668] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2097.945914] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aa0ef2d8-767c-47d4-ae37-d18a28700ae2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.958329] env[62684]: DEBUG oslo_vmware.api [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Waiting for the task: (returnval){ [ 2097.958329] env[62684]: value = "task-2053281" [ 2097.958329] env[62684]: _type = "Task" [ 2097.958329] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2097.975089] env[62684]: DEBUG oslo_vmware.api [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053281, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.020515] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fda2c628-a5ec-4e1c-849b-a23dc27cae5f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.030131] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e52b37-00ca-4d28-a445-32b4ab985760 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.069406] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94fc538c-2ac0-43b5-8e5a-7f7183e3430d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.080251] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02e04ce3-2c30-4b8c-876f-da950f29f7b2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.098726] env[62684]: DEBUG nova.compute.provider_tree [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2098.169238] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 845b2e2a-cee0-4598-afbd-1f07aa52468f] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2098.189630] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b323ab27-ae5d-4465-b02c-ef27ecc693ad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "025dfe36-1f14-4bda-84a0-d424364b745b" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2098.189630] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b323ab27-ae5d-4465-b02c-ef27ecc693ad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "025dfe36-1f14-4bda-84a0-d424364b745b" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2098.301048] env[62684]: DEBUG nova.network.neutron [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Updating instance_info_cache with network_info: [{"id": "fafc2062-9754-4ce0-8647-362b6bb8f8d7", "address": "fa:16:3e:a1:7d:89", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": 
"floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfafc2062-97", "ovs_interfaceid": "fafc2062-9754-4ce0-8647-362b6bb8f8d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ff6434b2-d91e-43cc-b6f8-03cec921c38d", "address": "fa:16:3e:88:3a:fb", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff6434b2-d9", "ovs_interfaceid": "ff6434b2-d91e-43cc-b6f8-03cec921c38d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5f8a8ef1-7f32-48b8-9444-b5fc1ab25e68", "address": "fa:16:3e:f0:7c:90", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f8a8ef1-7f", "ovs_interfaceid": "5f8a8ef1-7f32-48b8-9444-b5fc1ab25e68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2098.307973] env[62684]: DEBUG oslo_vmware.api [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053280, 'name': PowerOnVM_Task, 'duration_secs': 0.531965} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2098.308669] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2098.308669] env[62684]: INFO nova.compute.manager [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Took 15.18 seconds to spawn the instance on the hypervisor. [ 2098.308669] env[62684]: DEBUG nova.compute.manager [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2098.309504] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f29d9365-b369-4719-854b-53eb773c7ded {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.468532] env[62684]: DEBUG oslo_vmware.api [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053281, 'name': PowerOffVM_Task, 'duration_secs': 0.189417} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2098.468819] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2098.468995] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2098.469271] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8a56d6b5-f1e7-4c75-a084-d61ad3771c09 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.495339] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2098.495611] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2098.495817] env[62684]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Deleting the datastore file [datastore1] 4cf48f05-d643-47e6-9a0b-33415d80890c {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2098.496115] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a3067369-97e8-49b3-8a54-a4cb98eab6f8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.502524] env[62684]: DEBUG oslo_vmware.api [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Waiting for the task: (returnval){ [ 2098.502524] env[62684]: value = "task-2053283" [ 2098.502524] env[62684]: _type = "Task" [ 2098.502524] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2098.510922] env[62684]: DEBUG oslo_vmware.api [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053283, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.518429] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "23578214-6708-43ae-88ce-56212083532a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2098.518723] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "23578214-6708-43ae-88ce-56212083532a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2098.519287] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "23578214-6708-43ae-88ce-56212083532a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2098.519287] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "23578214-6708-43ae-88ce-56212083532a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2098.519439] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "23578214-6708-43ae-88ce-56212083532a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s 
{{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2098.522185] env[62684]: INFO nova.compute.manager [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Terminating instance [ 2098.524198] env[62684]: DEBUG nova.compute.manager [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2098.524508] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2098.525567] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd70f0d8-0aea-40f7-bc6f-6acd086243c2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.534027] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2098.534027] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0dcda969-a51e-41bd-a7dd-c97319293158 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.540087] env[62684]: DEBUG oslo_vmware.api [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2098.540087] env[62684]: value = "task-2053284" [ 2098.540087] env[62684]: _type = "Task" [ 2098.540087] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2098.548406] env[62684]: DEBUG oslo_vmware.api [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053284, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.602984] env[62684]: DEBUG nova.scheduler.client.report [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2098.672732] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 41da0c18-dd9c-49bb-8b0d-a907575ee22e] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2098.692035] env[62684]: INFO nova.compute.manager [None req-b323ab27-ae5d-4465-b02c-ef27ecc693ad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Detaching volume 7b18a98f-a692-4a68-9a65-227928ad0562 [ 2098.732955] env[62684]: INFO nova.virt.block_device [None req-b323ab27-ae5d-4465-b02c-ef27ecc693ad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Attempting to driver detach volume 7b18a98f-a692-4a68-9a65-227928ad0562 from mountpoint /dev/sdb [ 2098.733274] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-b323ab27-ae5d-4465-b02c-ef27ecc693ad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Volume detach. 
Driver type: vmdk {{(pid=62684) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2098.733475] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-b323ab27-ae5d-4465-b02c-ef27ecc693ad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421315', 'volume_id': '7b18a98f-a692-4a68-9a65-227928ad0562', 'name': 'volume-7b18a98f-a692-4a68-9a65-227928ad0562', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '025dfe36-1f14-4bda-84a0-d424364b745b', 'attached_at': '', 'detached_at': '', 'volume_id': '7b18a98f-a692-4a68-9a65-227928ad0562', 'serial': '7b18a98f-a692-4a68-9a65-227928ad0562'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2098.734811] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6bc612a-d75b-448b-96f1-691c680b78e0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.759200] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-215cda52-aad3-4936-8a39-b8f52cae2b03 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.767638] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c3d12f-4665-4335-a52d-11e7f388cfc3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.775967] env[62684]: DEBUG nova.compute.manager [req-50999d0c-30c6-4b61-a57f-0c9c8cf9e950 req-a7ec169c-2e77-4952-b1e1-7284c5127d28 service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Received event network-changed-5f8a8ef1-7f32-48b8-9444-b5fc1ab25e68 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2098.776253] env[62684]: DEBUG nova.compute.manager [req-50999d0c-30c6-4b61-a57f-0c9c8cf9e950 req-a7ec169c-2e77-4952-b1e1-7284c5127d28 service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Refreshing instance network info cache due to event network-changed-5f8a8ef1-7f32-48b8-9444-b5fc1ab25e68. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2098.776469] env[62684]: DEBUG oslo_concurrency.lockutils [req-50999d0c-30c6-4b61-a57f-0c9c8cf9e950 req-a7ec169c-2e77-4952-b1e1-7284c5127d28 service nova] Acquiring lock "refresh_cache-81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2098.792405] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28fc812c-e1db-4d39-b6ec-01285a8c7ccd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.808283] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "refresh_cache-81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2098.809312] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2098.809312] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2098.809427] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-b323ab27-ae5d-4465-b02c-ef27ecc693ad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] The volume has not been displaced from its original location: [datastore1] volume-7b18a98f-a692-4a68-9a65-227928ad0562/volume-7b18a98f-a692-4a68-9a65-227928ad0562.vmdk. No consolidation needed. 
{{(pid=62684) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2098.814635] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-b323ab27-ae5d-4465-b02c-ef27ecc693ad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Reconfiguring VM instance instance-00000029 to detach disk 2001 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2098.814957] env[62684]: DEBUG oslo_concurrency.lockutils [req-50999d0c-30c6-4b61-a57f-0c9c8cf9e950 req-a7ec169c-2e77-4952-b1e1-7284c5127d28 service nova] Acquired lock "refresh_cache-81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2098.815163] env[62684]: DEBUG nova.network.neutron [req-50999d0c-30c6-4b61-a57f-0c9c8cf9e950 req-a7ec169c-2e77-4952-b1e1-7284c5127d28 service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Refreshing network info cache for port 5f8a8ef1-7f32-48b8-9444-b5fc1ab25e68 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2098.818329] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30edf28c-c7dd-4a85-bc5d-0d68ef9c99ce {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.820214] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-696a83f2-dd7c-4bd8-94f5-365d8d5521d0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.841055] env[62684]: INFO nova.compute.manager [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Took 23.81 seconds to build instance. 
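[editor's note] The task lifecycle traced above (PowerOffVM_Task, DeleteDatastoreFile_Task, ReconfigVM_Task created, "progress is 0%" / "14%" polled, then "completed successfully" with a measured duration_secs) is a plain poll-until-done loop. The sketch below is illustrative only: it assumes a hypothetical get_task_info() callable standing in for the PropertyCollector lookup the real oslo.vmware session performs, and is not the driver's actual implementation.

import time

class TaskFailed(Exception):
    pass

def wait_for_task(get_task_info, task_id, poll_interval=0.5):
    """Poll a vCenter-style task until it reaches a terminal state.

    get_task_info is a hypothetical callable returning a dict such as
    {'state': 'running', 'progress': 14} or {'state': 'success'}.
    """
    started = time.monotonic()
    while True:
        info = get_task_info(task_id)
        state = info['state']
        if state == 'success':
            # Mirrors the "... completed successfully" log record,
            # including the reported duration_secs.
            return {'id': task_id, 'duration_secs': time.monotonic() - started}
        if state == 'error':
            raise TaskFailed(info.get('error', 'unknown error'))
        # Still queued/running: this is the point where the driver emits
        # the "progress is N%" DEBUG lines seen above before retrying.
        time.sleep(poll_interval)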
[ 2098.853756] env[62684]: DEBUG nova.virt.hardware [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2098.853904] env[62684]: DEBUG nova.virt.hardware [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2098.854081] env[62684]: DEBUG nova.virt.hardware [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2098.854277] env[62684]: DEBUG nova.virt.hardware [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2098.854430] env[62684]: DEBUG nova.virt.hardware [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2098.854581] env[62684]: DEBUG nova.virt.hardware [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2098.854792] env[62684]: DEBUG nova.virt.hardware [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2098.854959] env[62684]: DEBUG nova.virt.hardware [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2098.855245] env[62684]: DEBUG nova.virt.hardware [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2098.855332] 
env[62684]: DEBUG nova.virt.hardware [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2098.855496] env[62684]: DEBUG nova.virt.hardware [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2098.863148] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Reconfiguring VM to attach interface {{(pid=62684) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 2098.864720] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee967c12-b8f0-4516-b1de-2d324f669a91 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.876631] env[62684]: DEBUG oslo_vmware.api [None req-b323ab27-ae5d-4465-b02c-ef27ecc693ad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 2098.876631] env[62684]: value = "task-2053285" [ 2098.876631] env[62684]: _type = "Task" [ 2098.876631] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2098.882702] env[62684]: DEBUG oslo_vmware.api [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2098.882702] env[62684]: value = "task-2053286" [ 2098.882702] env[62684]: _type = "Task" [ 2098.882702] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2098.886474] env[62684]: DEBUG oslo_vmware.api [None req-b323ab27-ae5d-4465-b02c-ef27ecc693ad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053285, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.894173] env[62684]: DEBUG oslo_vmware.api [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053286, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2099.014531] env[62684]: DEBUG oslo_vmware.api [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Task: {'id': task-2053283, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101102} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2099.014838] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2099.014992] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2099.015205] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2099.015389] env[62684]: INFO nova.compute.manager [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Took 1.09 seconds to destroy the instance on the hypervisor. [ 2099.015677] env[62684]: DEBUG oslo.service.loopingcall [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2099.015875] env[62684]: DEBUG nova.compute.manager [-] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2099.015971] env[62684]: DEBUG nova.network.neutron [-] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2099.034309] env[62684]: DEBUG nova.network.neutron [-] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2099.050293] env[62684]: DEBUG oslo_vmware.api [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053284, 'name': PowerOffVM_Task, 'duration_secs': 0.184322} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2099.050434] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2099.050683] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2099.050984] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d59ff477-54b8-4100-b58a-5532d5448ac2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.057932] env[62684]: DEBUG nova.objects.instance [None req-322424b0-0243-434c-8c1f-9ce9a1173dc2 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lazy-loading 'flavor' on Instance uuid 57537508-06e7-43a4-95c5-c4399b8bf93f {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2099.108024] env[62684]: DEBUG oslo_concurrency.lockutils [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.472s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2099.108620] env[62684]: DEBUG nova.compute.manager [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2099.111950] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.671s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2099.112241] env[62684]: DEBUG nova.objects.instance [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lazy-loading 'resources' on Instance uuid 983218ac-7cf3-48ef-88d8-aa9e9322df4b {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2099.175771] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 380a804e-e1bf-4efa-8bb8-213733778927] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2099.346624] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2099.346881] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2099.347106] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Deleting the datastore file [datastore1] 23578214-6708-43ae-88ce-56212083532a {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2099.347408] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b1278da7-ff64-45fb-bde8-d38ef340d8cf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.354355] env[62684]: DEBUG oslo_vmware.api [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2099.354355] env[62684]: value = "task-2053288" [ 2099.354355] env[62684]: _type = "Task" [ 2099.354355] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2099.362505] env[62684]: DEBUG oslo_vmware.api [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053288, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2099.363104] env[62684]: DEBUG oslo_concurrency.lockutils [None req-acda92df-af6f-4418-94b0-79ef4dc50acf tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "daf1486b-d5c2-4341-8a27-36eeeb08cd26" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.348s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2099.386923] env[62684]: DEBUG oslo_vmware.api [None req-b323ab27-ae5d-4465-b02c-ef27ecc693ad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053285, 'name': ReconfigVM_Task, 'duration_secs': 0.221843} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2099.390434] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-b323ab27-ae5d-4465-b02c-ef27ecc693ad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Reconfigured VM instance instance-00000029 to detach disk 2001 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2099.395301] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-82b99afb-d760-4462-b6b4-a4f62bbd7383 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.410866] env[62684]: DEBUG oslo_vmware.api [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053286, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2099.412395] env[62684]: DEBUG oslo_vmware.api [None req-b323ab27-ae5d-4465-b02c-ef27ecc693ad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 2099.412395] env[62684]: value = "task-2053289" [ 2099.412395] env[62684]: _type = "Task" [ 2099.412395] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2099.420265] env[62684]: DEBUG oslo_vmware.api [None req-b323ab27-ae5d-4465-b02c-ef27ecc693ad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053289, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2099.536668] env[62684]: DEBUG nova.network.neutron [-] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2099.562769] env[62684]: DEBUG oslo_concurrency.lockutils [None req-322424b0-0243-434c-8c1f-9ce9a1173dc2 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "refresh_cache-57537508-06e7-43a4-95c5-c4399b8bf93f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2099.562968] env[62684]: DEBUG oslo_concurrency.lockutils [None req-322424b0-0243-434c-8c1f-9ce9a1173dc2 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired lock "refresh_cache-57537508-06e7-43a4-95c5-c4399b8bf93f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2099.563253] env[62684]: DEBUG nova.network.neutron [None req-322424b0-0243-434c-8c1f-9ce9a1173dc2 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2099.563404] env[62684]: DEBUG nova.objects.instance [None req-322424b0-0243-434c-8c1f-9ce9a1173dc2 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lazy-loading 'info_cache' on Instance uuid 57537508-06e7-43a4-95c5-c4399b8bf93f {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2099.613515] env[62684]: DEBUG nova.compute.utils [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2099.618355] env[62684]: DEBUG nova.compute.manager [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2099.618355] env[62684]: DEBUG nova.network.neutron [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2099.625435] env[62684]: DEBUG nova.compute.manager [req-4286ea05-bac8-45e6-bbc2-4f89a7acfb19 req-2056a09f-9d67-4973-b70c-143d589d949a service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Received event network-changed-da310d7c-cd12-49ca-8014-efa9469aef45 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2099.625660] env[62684]: DEBUG nova.compute.manager [req-4286ea05-bac8-45e6-bbc2-4f89a7acfb19 req-2056a09f-9d67-4973-b70c-143d589d949a service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Refreshing instance network info cache due to event network-changed-da310d7c-cd12-49ca-8014-efa9469aef45. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2099.625898] env[62684]: DEBUG oslo_concurrency.lockutils [req-4286ea05-bac8-45e6-bbc2-4f89a7acfb19 req-2056a09f-9d67-4973-b70c-143d589d949a service nova] Acquiring lock "refresh_cache-daf1486b-d5c2-4341-8a27-36eeeb08cd26" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2099.626071] env[62684]: DEBUG oslo_concurrency.lockutils [req-4286ea05-bac8-45e6-bbc2-4f89a7acfb19 req-2056a09f-9d67-4973-b70c-143d589d949a service nova] Acquired lock "refresh_cache-daf1486b-d5c2-4341-8a27-36eeeb08cd26" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2099.626501] env[62684]: DEBUG nova.network.neutron [req-4286ea05-bac8-45e6-bbc2-4f89a7acfb19 req-2056a09f-9d67-4973-b70c-143d589d949a service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Refreshing network info cache for port da310d7c-cd12-49ca-8014-efa9469aef45 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2099.666515] env[62684]: DEBUG nova.policy [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a3800d71923848db8635de9a8a2ff9f6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '76d88ac878d44480b3b54b24ab87efa9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2099.680537] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: a1b7c2a7-f21d-41f4-9102-e656b8205e1f] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2099.873562] env[62684]: DEBUG oslo_vmware.api [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053288, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145176} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2099.873814] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2099.874009] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2099.874727] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2099.874805] env[62684]: INFO nova.compute.manager [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 23578214-6708-43ae-88ce-56212083532a] Took 1.35 seconds to destroy the instance on the hypervisor. [ 2099.874994] env[62684]: DEBUG oslo.service.loopingcall [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2099.875903] env[62684]: DEBUG nova.compute.manager [-] [instance: 23578214-6708-43ae-88ce-56212083532a] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2099.875903] env[62684]: DEBUG nova.network.neutron [-] [instance: 23578214-6708-43ae-88ce-56212083532a] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2099.902560] env[62684]: DEBUG oslo_vmware.api [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053286, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2099.903533] env[62684]: DEBUG nova.network.neutron [req-50999d0c-30c6-4b61-a57f-0c9c8cf9e950 req-a7ec169c-2e77-4952-b1e1-7284c5127d28 service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Updated VIF entry in instance network info cache for port 5f8a8ef1-7f32-48b8-9444-b5fc1ab25e68. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2099.904226] env[62684]: DEBUG nova.network.neutron [req-50999d0c-30c6-4b61-a57f-0c9c8cf9e950 req-a7ec169c-2e77-4952-b1e1-7284c5127d28 service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Updating instance_info_cache with network_info: [{"id": "fafc2062-9754-4ce0-8647-362b6bb8f8d7", "address": "fa:16:3e:a1:7d:89", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfafc2062-97", "ovs_interfaceid": "fafc2062-9754-4ce0-8647-362b6bb8f8d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ff6434b2-d91e-43cc-b6f8-03cec921c38d", "address": "fa:16:3e:88:3a:fb", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff6434b2-d9", "ovs_interfaceid": "ff6434b2-d91e-43cc-b6f8-03cec921c38d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5f8a8ef1-7f32-48b8-9444-b5fc1ab25e68", "address": "fa:16:3e:f0:7c:90", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f8a8ef1-7f", "ovs_interfaceid": "5f8a8ef1-7f32-48b8-9444-b5fc1ab25e68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2099.924508] env[62684]: DEBUG oslo_vmware.api [None req-b323ab27-ae5d-4465-b02c-ef27ecc693ad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053289, 'name': ReconfigVM_Task, 'duration_secs': 0.142672} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2099.924984] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-b323ab27-ae5d-4465-b02c-ef27ecc693ad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421315', 'volume_id': '7b18a98f-a692-4a68-9a65-227928ad0562', 'name': 'volume-7b18a98f-a692-4a68-9a65-227928ad0562', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '025dfe36-1f14-4bda-84a0-d424364b745b', 'attached_at': '', 'detached_at': '', 'volume_id': '7b18a98f-a692-4a68-9a65-227928ad0562', 'serial': '7b18a98f-a692-4a68-9a65-227928ad0562'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2099.945202] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b085ea82-5407-4b08-85ab-20d6726b1882 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.953939] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84389549-dfc3-48c4-a16e-0fad2480143c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.986190] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd302e9-861e-4566-aaa5-4b0db76deef5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.994257] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d242a5-c6d7-4b82-a5b9-0f7286bca5bb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.008472] env[62684]: DEBUG nova.compute.provider_tree [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2100.039487] env[62684]: INFO nova.compute.manager [-] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Took 1.02 seconds to deallocate network for instance. 
[ 2100.066320] env[62684]: DEBUG nova.objects.base [None req-322424b0-0243-434c-8c1f-9ce9a1173dc2 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Object Instance<57537508-06e7-43a4-95c5-c4399b8bf93f> lazy-loaded attributes: flavor,info_cache {{(pid=62684) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2100.068979] env[62684]: DEBUG nova.network.neutron [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Successfully created port: 8d2957b9-747d-4fea-8047-0980e3548bb2 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2100.118665] env[62684]: DEBUG nova.compute.manager [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2100.183844] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: fcc937e3-163d-432b-a131-a53c002e5e8d] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2100.407948] env[62684]: DEBUG oslo_concurrency.lockutils [req-50999d0c-30c6-4b61-a57f-0c9c8cf9e950 req-a7ec169c-2e77-4952-b1e1-7284c5127d28 service nova] Releasing lock "refresh_cache-81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2100.408840] env[62684]: DEBUG oslo_vmware.api [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053286, 'name': ReconfigVM_Task, 'duration_secs': 1.067618} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2100.409688] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2100.409953] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Reconfigured VM to attach interface {{(pid=62684) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 2100.486684] env[62684]: DEBUG nova.objects.instance [None req-b323ab27-ae5d-4465-b02c-ef27ecc693ad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lazy-loading 'flavor' on Instance uuid 025dfe36-1f14-4bda-84a0-d424364b745b {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2100.501879] env[62684]: DEBUG nova.network.neutron [req-4286ea05-bac8-45e6-bbc2-4f89a7acfb19 req-2056a09f-9d67-4973-b70c-143d589d949a service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Updated VIF entry in instance network info cache for port da310d7c-cd12-49ca-8014-efa9469aef45. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2100.502334] env[62684]: DEBUG nova.network.neutron [req-4286ea05-bac8-45e6-bbc2-4f89a7acfb19 req-2056a09f-9d67-4973-b70c-143d589d949a service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Updating instance_info_cache with network_info: [{"id": "da310d7c-cd12-49ca-8014-efa9469aef45", "address": "fa:16:3e:5a:6f:ea", "network": {"id": "64494ea7-f6d9-430c-8ac7-e876e763004b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2056829508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.164", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e57b232a7e7647c7a3b2bca3c096feb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda310d7c-cd", "ovs_interfaceid": "da310d7c-cd12-49ca-8014-efa9469aef45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2100.511916] env[62684]: DEBUG nova.scheduler.client.report [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2100.547276] env[62684]: DEBUG oslo_concurrency.lockutils [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2100.690172] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 7b29207a-7fa8-4374-819e-c046b2014969] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2100.790271] env[62684]: DEBUG nova.network.neutron [-] [instance: 23578214-6708-43ae-88ce-56212083532a] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2100.915484] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5f06dd05-77ef-41de-9558-bb440a919309 tempest-AttachInterfacesTestJSON-207820228 
tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "interface-81b7949d-be24-46c9-8dc8-c249b65bb039-5f8a8ef1-7f32-48b8-9444-b5fc1ab25e68" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.082s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2100.942161] env[62684]: DEBUG nova.network.neutron [None req-322424b0-0243-434c-8c1f-9ce9a1173dc2 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Updating instance_info_cache with network_info: [{"id": "0a96e2ce-2335-44e2-940d-26d3afbafa3a", "address": "fa:16:3e:6d:b8:02", "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-120070593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "263c101fcc5e493789b79dfd1ba97cc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a96e2ce-23", "ovs_interfaceid": "0a96e2ce-2335-44e2-940d-26d3afbafa3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2101.005461] env[62684]: DEBUG oslo_concurrency.lockutils [req-4286ea05-bac8-45e6-bbc2-4f89a7acfb19 req-2056a09f-9d67-4973-b70c-143d589d949a service nova] Releasing lock "refresh_cache-daf1486b-d5c2-4341-8a27-36eeeb08cd26" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2101.016767] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.905s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2101.022441] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.652s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2101.025025] env[62684]: INFO nova.compute.claims [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2101.043172] env[62684]: INFO 
nova.scheduler.client.report [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Deleted allocations for instance 983218ac-7cf3-48ef-88d8-aa9e9322df4b [ 2101.130453] env[62684]: DEBUG nova.compute.manager [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2101.164140] env[62684]: DEBUG nova.virt.hardware [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2101.164430] env[62684]: DEBUG nova.virt.hardware [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2101.164598] env[62684]: DEBUG nova.virt.hardware [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2101.164798] env[62684]: DEBUG nova.virt.hardware [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2101.164956] env[62684]: DEBUG nova.virt.hardware [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2101.165133] env[62684]: DEBUG nova.virt.hardware [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2101.165844] env[62684]: DEBUG nova.virt.hardware [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Topology 
preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2101.165844] env[62684]: DEBUG nova.virt.hardware [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2101.165844] env[62684]: DEBUG nova.virt.hardware [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2101.165844] env[62684]: DEBUG nova.virt.hardware [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2101.166029] env[62684]: DEBUG nova.virt.hardware [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2101.166913] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f67a684-b05d-4392-b5a0-8173d66f7ab8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.175431] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb047da-1e8b-4c78-ad10-2a924b3025a8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.193958] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 548df581-073b-41d4-bcbe-df7342a2beca] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2101.297998] env[62684]: INFO nova.compute.manager [-] [instance: 23578214-6708-43ae-88ce-56212083532a] Took 1.42 seconds to deallocate network for instance. 
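The nova.virt.hardware DEBUG entries above trace how a guest CPU topology is chosen for the 1-vCPU m1.nano flavor: with flavor and image limits and preferences all unset (logged as 0:0:0), the maxima fall back to 65536 per dimension, and the only (sockets, cores, threads) combination whose product equals 1 is 1:1:1, hence "Got 1 possible topologies". The snippet below is a minimal illustrative sketch of that enumeration, not Nova's actual _get_possible_cpu_topologies code; the function name and defaults are assumptions for the example.

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Illustrative only: enumerate (sockets, cores, threads) triples whose
    # product equals the vCPU count and which respect the per-dimension
    # maxima, mirroring the "Build topologies ... Got N possible topologies"
    # DEBUG lines above.
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    found.append((sockets, cores, threads))
    return found

# For 1 vCPU with unset (0 -> default 65536) limits exactly one topology
# survives, matching the log: [(1, 1, 1)].
print(possible_topologies(1))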
[ 2101.445814] env[62684]: DEBUG oslo_concurrency.lockutils [None req-322424b0-0243-434c-8c1f-9ce9a1173dc2 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Releasing lock "refresh_cache-57537508-06e7-43a4-95c5-c4399b8bf93f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2101.477843] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fc84bff3-5016-488a-bfe9-41b160df0edf tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "025dfe36-1f14-4bda-84a0-d424364b745b" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2101.495534] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b323ab27-ae5d-4465-b02c-ef27ecc693ad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "025dfe36-1f14-4bda-84a0-d424364b745b" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.307s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2101.498545] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fc84bff3-5016-488a-bfe9-41b160df0edf tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "025dfe36-1f14-4bda-84a0-d424364b745b" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.020s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2101.498545] env[62684]: DEBUG nova.compute.manager [None req-fc84bff3-5016-488a-bfe9-41b160df0edf tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2101.498545] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07573beb-fefc-4b1d-b0ae-d0a5f43e26b2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.510144] env[62684]: DEBUG nova.compute.manager [None req-fc84bff3-5016-488a-bfe9-41b160df0edf tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62684) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 2101.513136] env[62684]: DEBUG nova.objects.instance [None req-fc84bff3-5016-488a-bfe9-41b160df0edf tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lazy-loading 'flavor' on Instance uuid 025dfe36-1f14-4bda-84a0-d424364b745b {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2101.554918] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbb14cc6-eed5-4ebd-8799-78b655876376 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "983218ac-7cf3-48ef-88d8-aa9e9322df4b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.095s {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2101.700039] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 6faeae10-c0bd-4297-b992-c05511fedb21] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2101.731727] env[62684]: DEBUG nova.compute.manager [req-50629a9b-7e45-44d3-8f91-97c5d33540bf req-24ad4fe1-695d-41a8-99c3-09867f9a5ad0 service nova] [instance: 23578214-6708-43ae-88ce-56212083532a] Received event network-vif-deleted-9833c404-668e-4109-a2eb-c4b18c1fa92c {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2101.805308] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2101.949196] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-322424b0-0243-434c-8c1f-9ce9a1173dc2 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2101.949543] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-71fb961b-ae63-4a4e-b418-dde3283877ee {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.957891] env[62684]: DEBUG oslo_vmware.api [None req-322424b0-0243-434c-8c1f-9ce9a1173dc2 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2101.957891] env[62684]: value = "task-2053290" [ 2101.957891] env[62684]: _type = "Task" [ 2101.957891] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2101.972380] env[62684]: DEBUG oslo_vmware.api [None req-322424b0-0243-434c-8c1f-9ce9a1173dc2 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053290, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2102.019084] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc84bff3-5016-488a-bfe9-41b160df0edf tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2102.019084] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d115700b-6ef0-4851-be57-3044255da3f5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.025602] env[62684]: DEBUG oslo_vmware.api [None req-fc84bff3-5016-488a-bfe9-41b160df0edf tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 2102.025602] env[62684]: value = "task-2053291" [ 2102.025602] env[62684]: _type = "Task" [ 2102.025602] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2102.040435] env[62684]: DEBUG oslo_vmware.api [None req-fc84bff3-5016-488a-bfe9-41b160df0edf tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053291, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2102.206668] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 2c85cf19-dab3-4fa1-a55b-c1e6f16ccc21] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2102.306420] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-274e5e82-dec9-48c9-85fc-4a91b4f19ae9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.314402] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11455a35-ec23-4777-9213-594588bc2e4f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.345857] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-636c7993-cfba-4d59-bda4-c8eb79dc8d9f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.353637] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3580cc9e-196c-47b9-9df3-783effa7173a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.367998] env[62684]: DEBUG nova.compute.provider_tree [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2102.468101] env[62684]: DEBUG oslo_vmware.api [None req-322424b0-0243-434c-8c1f-9ce9a1173dc2 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053290, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2102.535184] env[62684]: DEBUG oslo_vmware.api [None req-fc84bff3-5016-488a-bfe9-41b160df0edf tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053291, 'name': PowerOffVM_Task, 'duration_secs': 0.183174} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2102.535470] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc84bff3-5016-488a-bfe9-41b160df0edf tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2102.535545] env[62684]: DEBUG nova.compute.manager [None req-fc84bff3-5016-488a-bfe9-41b160df0edf tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2102.536362] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e250aad-52d7-4692-8777-3e7086c7e777 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.583148] env[62684]: DEBUG nova.network.neutron [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Successfully updated port: 8d2957b9-747d-4fea-8047-0980e3548bb2 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2102.711485] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: dab11b88-ac23-43f0-9203-024faf41e1f5] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2102.871434] env[62684]: DEBUG nova.scheduler.client.report [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2102.971315] env[62684]: DEBUG oslo_vmware.api [None req-322424b0-0243-434c-8c1f-9ce9a1173dc2 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053290, 'name': PowerOnVM_Task, 'duration_secs': 0.633846} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2102.971681] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-322424b0-0243-434c-8c1f-9ce9a1173dc2 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2102.971909] env[62684]: DEBUG nova.compute.manager [None req-322424b0-0243-434c-8c1f-9ce9a1173dc2 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2102.972711] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c45fad-5f0a-46e7-9b0f-cc784c681ae2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.009731] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "interface-81b7949d-be24-46c9-8dc8-c249b65bb039-ff6434b2-d91e-43cc-b6f8-03cec921c38d" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2103.010095] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "interface-81b7949d-be24-46c9-8dc8-c249b65bb039-ff6434b2-d91e-43cc-b6f8-03cec921c38d" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2103.049765] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fc84bff3-5016-488a-bfe9-41b160df0edf tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "025dfe36-1f14-4bda-84a0-d424364b745b" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.553s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2103.085999] env[62684]: DEBUG oslo_concurrency.lockutils [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "refresh_cache-28886f7c-6efc-4505-84f6-682d75cea215" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2103.086153] env[62684]: DEBUG oslo_concurrency.lockutils [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired lock "refresh_cache-28886f7c-6efc-4505-84f6-682d75cea215" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2103.086307] env[62684]: DEBUG nova.network.neutron [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Building 
network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2103.216749] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: a56a3fab-e491-44f5-9cf4-2c308138ffc4] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2103.296507] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Acquiring lock "6b461482-0606-4af3-98a2-88c0318d1a69" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2103.297016] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Lock "6b461482-0606-4af3-98a2-88c0318d1a69" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2103.380526] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.358s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2103.380922] env[62684]: DEBUG nova.compute.manager [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2103.383789] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.380s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2103.385312] env[62684]: INFO nova.compute.claims [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2103.512598] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2103.512815] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2103.514096] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba4eb332-7bee-4b5c-8c19-91b6126e4370 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.532504] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6106d60-ad74-4f84-9d6d-b34ce279ff88 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.563148] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Reconfiguring VM to detach interface {{(pid=62684) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 2103.563499] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c2fe1e2-3acb-4d8e-96ed-e12cb54f2b41 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.583187] env[62684]: DEBUG oslo_vmware.api [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2103.583187] env[62684]: value = "task-2053292" [ 2103.583187] env[62684]: _type = "Task" [ 2103.583187] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2103.594999] env[62684]: DEBUG oslo_vmware.api [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053292, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2103.633070] env[62684]: DEBUG nova.network.neutron [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2103.720659] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 18a97088-fffa-4b77-8ab0-d24f6f84f516] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2103.799263] env[62684]: DEBUG nova.compute.manager [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2103.824165] env[62684]: DEBUG nova.compute.manager [req-d1ba5fd6-b3dc-4bf9-a710-0aec4c0ba362 req-98411cbe-4c7f-4443-9acd-12a798dafeb3 service nova] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Received event network-vif-plugged-8d2957b9-747d-4fea-8047-0980e3548bb2 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2103.824165] env[62684]: DEBUG oslo_concurrency.lockutils [req-d1ba5fd6-b3dc-4bf9-a710-0aec4c0ba362 req-98411cbe-4c7f-4443-9acd-12a798dafeb3 service nova] Acquiring lock "28886f7c-6efc-4505-84f6-682d75cea215-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2103.824307] env[62684]: DEBUG oslo_concurrency.lockutils [req-d1ba5fd6-b3dc-4bf9-a710-0aec4c0ba362 req-98411cbe-4c7f-4443-9acd-12a798dafeb3 service nova] Lock "28886f7c-6efc-4505-84f6-682d75cea215-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2103.824478] env[62684]: DEBUG oslo_concurrency.lockutils [req-d1ba5fd6-b3dc-4bf9-a710-0aec4c0ba362 req-98411cbe-4c7f-4443-9acd-12a798dafeb3 service nova] Lock "28886f7c-6efc-4505-84f6-682d75cea215-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2103.824807] env[62684]: DEBUG nova.compute.manager [req-d1ba5fd6-b3dc-4bf9-a710-0aec4c0ba362 req-98411cbe-4c7f-4443-9acd-12a798dafeb3 service nova] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] No waiting events found dispatching network-vif-plugged-8d2957b9-747d-4fea-8047-0980e3548bb2 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2103.824894] env[62684]: WARNING nova.compute.manager [req-d1ba5fd6-b3dc-4bf9-a710-0aec4c0ba362 req-98411cbe-4c7f-4443-9acd-12a798dafeb3 service nova] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Received unexpected event network-vif-plugged-8d2957b9-747d-4fea-8047-0980e3548bb2 for instance with vm_state building and task_state spawning. 
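Several oslo_vmware.api entries in this stretch follow the same shape: a vCenter task (PowerOnVM_Task, PowerOffVM_Task, ReconfigVM_Task) is submitted, then polled until it reports "completed successfully" together with a duration_secs value. The loop below is a self-contained sketch of that polling pattern under stated assumptions; it is not the oslo.vmware wait_for_task implementation, and get_task_progress is a hypothetical stand-in for the real property-collector query.

import time

def wait_for_task(task_id, task_name, get_task_progress, poll_interval=0.5):
    # Sketch only: poll a task until it finishes, emitting lines shaped like
    # the "Task: {...} progress is N%." / "completed successfully" entries
    # above. get_task_progress(task_id) is assumed to return (state, percent)
    # with state one of 'running', 'success' or 'error'.
    started = time.monotonic()
    while True:
        state, percent = get_task_progress(task_id)
        if state == 'running':
            print("Task: {'id': %s, 'name': %s} progress is %d%%." % (task_id, task_name, percent))
            time.sleep(poll_interval)
            continue
        duration = time.monotonic() - started
        if state == 'success':
            print("Task: {'id': %s, 'name': %s, 'duration_secs': %.6f} completed successfully."
                  % (task_id, task_name, duration))
            return duration
        raise RuntimeError('%s (%s) failed after %.2fs' % (task_id, task_name, duration))

# Example with a fake backend that finishes on the third poll, reusing the
# task id seen above for PowerOnVM_Task:
_polls = iter([('running', 0), ('running', 89), ('success', 100)])
wait_for_task('task-2053290', 'PowerOnVM_Task', lambda _tid: next(_polls), poll_interval=0)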
[ 2103.825800] env[62684]: DEBUG nova.compute.manager [req-d1ba5fd6-b3dc-4bf9-a710-0aec4c0ba362 req-98411cbe-4c7f-4443-9acd-12a798dafeb3 service nova] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Received event network-changed-8d2957b9-747d-4fea-8047-0980e3548bb2 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2103.826033] env[62684]: DEBUG nova.compute.manager [req-d1ba5fd6-b3dc-4bf9-a710-0aec4c0ba362 req-98411cbe-4c7f-4443-9acd-12a798dafeb3 service nova] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Refreshing instance network info cache due to event network-changed-8d2957b9-747d-4fea-8047-0980e3548bb2. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2103.826254] env[62684]: DEBUG oslo_concurrency.lockutils [req-d1ba5fd6-b3dc-4bf9-a710-0aec4c0ba362 req-98411cbe-4c7f-4443-9acd-12a798dafeb3 service nova] Acquiring lock "refresh_cache-28886f7c-6efc-4505-84f6-682d75cea215" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2103.879837] env[62684]: DEBUG nova.network.neutron [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Updating instance_info_cache with network_info: [{"id": "8d2957b9-747d-4fea-8047-0980e3548bb2", "address": "fa:16:3e:9d:9d:79", "network": {"id": "7678b347-6a54-4b84-9a4d-b566bbeb1ea4", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-51664912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d88ac878d44480b3b54b24ab87efa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d2957b9-74", "ovs_interfaceid": "8d2957b9-747d-4fea-8047-0980e3548bb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2103.892678] env[62684]: DEBUG nova.compute.utils [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2103.896655] env[62684]: DEBUG nova.compute.manager [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2103.896835] env[62684]: DEBUG nova.network.neutron [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2103.970337] env[62684]: DEBUG nova.policy [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '96b96927115d49f2a04342784717e58e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '607a0aa1049640d882d7dd490f5f98ea', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2104.093552] env[62684]: DEBUG oslo_vmware.api [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053292, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2104.136921] env[62684]: DEBUG nova.objects.instance [None req-a3117d4d-56c8-46b8-8969-aa71cd533258 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lazy-loading 'flavor' on Instance uuid 025dfe36-1f14-4bda-84a0-d424364b745b {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2104.225719] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 31419285-9fdf-4d37-94d7-d1b08c6b6b05] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2104.293917] env[62684]: DEBUG nova.network.neutron [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Successfully created port: 0d755b84-8780-4295-a8a6-032192e3c6f7 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2104.323393] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2104.382200] env[62684]: DEBUG oslo_concurrency.lockutils [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Releasing lock "refresh_cache-28886f7c-6efc-4505-84f6-682d75cea215" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2104.382564] env[62684]: DEBUG nova.compute.manager [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 
tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Instance network_info: |[{"id": "8d2957b9-747d-4fea-8047-0980e3548bb2", "address": "fa:16:3e:9d:9d:79", "network": {"id": "7678b347-6a54-4b84-9a4d-b566bbeb1ea4", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-51664912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d88ac878d44480b3b54b24ab87efa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d2957b9-74", "ovs_interfaceid": "8d2957b9-747d-4fea-8047-0980e3548bb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2104.383040] env[62684]: DEBUG oslo_concurrency.lockutils [req-d1ba5fd6-b3dc-4bf9-a710-0aec4c0ba362 req-98411cbe-4c7f-4443-9acd-12a798dafeb3 service nova] Acquired lock "refresh_cache-28886f7c-6efc-4505-84f6-682d75cea215" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2104.383266] env[62684]: DEBUG nova.network.neutron [req-d1ba5fd6-b3dc-4bf9-a710-0aec4c0ba362 req-98411cbe-4c7f-4443-9acd-12a798dafeb3 service nova] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Refreshing network info cache for port 8d2957b9-747d-4fea-8047-0980e3548bb2 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2104.386102] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:9d:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'de5fcb06-b0d0-467f-86fe-06882165ac31', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8d2957b9-747d-4fea-8047-0980e3548bb2', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2104.393096] env[62684]: DEBUG oslo.service.loopingcall [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2104.395873] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2104.396403] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-efbc5721-1af8-43a6-a682-73477e73a17c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.412612] env[62684]: DEBUG nova.compute.manager [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2104.425847] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2104.425847] env[62684]: value = "task-2053293" [ 2104.425847] env[62684]: _type = "Task" [ 2104.425847] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2104.435485] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053293, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2104.596489] env[62684]: DEBUG oslo_vmware.api [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053292, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2104.643684] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a3117d4d-56c8-46b8-8969-aa71cd533258 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "refresh_cache-025dfe36-1f14-4bda-84a0-d424364b745b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2104.643828] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a3117d4d-56c8-46b8-8969-aa71cd533258 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquired lock "refresh_cache-025dfe36-1f14-4bda-84a0-d424364b745b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2104.644019] env[62684]: DEBUG nova.network.neutron [None req-a3117d4d-56c8-46b8-8969-aa71cd533258 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2104.644216] env[62684]: DEBUG nova.objects.instance [None req-a3117d4d-56c8-46b8-8969-aa71cd533258 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lazy-loading 'info_cache' on Instance uuid 025dfe36-1f14-4bda-84a0-d424364b745b {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2104.646141] env[62684]: DEBUG nova.network.neutron [req-d1ba5fd6-b3dc-4bf9-a710-0aec4c0ba362 req-98411cbe-4c7f-4443-9acd-12a798dafeb3 service nova] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Updated VIF entry in 
instance network info cache for port 8d2957b9-747d-4fea-8047-0980e3548bb2. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2104.646522] env[62684]: DEBUG nova.network.neutron [req-d1ba5fd6-b3dc-4bf9-a710-0aec4c0ba362 req-98411cbe-4c7f-4443-9acd-12a798dafeb3 service nova] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Updating instance_info_cache with network_info: [{"id": "8d2957b9-747d-4fea-8047-0980e3548bb2", "address": "fa:16:3e:9d:9d:79", "network": {"id": "7678b347-6a54-4b84-9a4d-b566bbeb1ea4", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-51664912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d88ac878d44480b3b54b24ab87efa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d2957b9-74", "ovs_interfaceid": "8d2957b9-747d-4fea-8047-0980e3548bb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2104.677550] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8a06bf-e885-46e0-b8dd-d8ecbaaa5a3b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.685445] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7de413d-5b12-41c0-a5a4-b4ecc59489b1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.719962] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc05007-0ec9-41da-8c78-17239d2b356b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.727376] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c64840-2911-4895-9178-08e843f6bbb1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.732095] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: a3c7943e-7528-41bc-9a20-1e2b57f832e3] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2104.742321] env[62684]: DEBUG nova.compute.provider_tree [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2104.935760] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053293, 'name': 
CreateVM_Task, 'duration_secs': 0.382951} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2104.935943] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2104.936670] env[62684]: DEBUG oslo_concurrency.lockutils [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2104.936847] env[62684]: DEBUG oslo_concurrency.lockutils [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2104.937247] env[62684]: DEBUG oslo_concurrency.lockutils [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2104.937517] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb2127ef-3bcc-4a97-9440-225992d698f1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.942199] env[62684]: DEBUG oslo_vmware.api [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2104.942199] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]528307c3-ceb5-65ea-1160-2372756b1b87" [ 2104.942199] env[62684]: _type = "Task" [ 2104.942199] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2104.950161] env[62684]: DEBUG oslo_vmware.api [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]528307c3-ceb5-65ea-1160-2372756b1b87, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.095148] env[62684]: DEBUG oslo_vmware.api [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053292, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.150898] env[62684]: DEBUG nova.objects.base [None req-a3117d4d-56c8-46b8-8969-aa71cd533258 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Object Instance<025dfe36-1f14-4bda-84a0-d424364b745b> lazy-loaded attributes: flavor,info_cache {{(pid=62684) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2105.152317] env[62684]: DEBUG oslo_concurrency.lockutils [req-d1ba5fd6-b3dc-4bf9-a710-0aec4c0ba362 req-98411cbe-4c7f-4443-9acd-12a798dafeb3 service nova] Releasing lock "refresh_cache-28886f7c-6efc-4505-84f6-682d75cea215" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2105.234841] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: df93c57e-716c-4c73-b551-9079a523ea0b] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2105.244945] env[62684]: DEBUG nova.scheduler.client.report [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2105.422276] env[62684]: DEBUG nova.compute.manager [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2105.448314] env[62684]: DEBUG nova.virt.hardware [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2105.448584] env[62684]: DEBUG nova.virt.hardware [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2105.448789] env[62684]: DEBUG nova.virt.hardware [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2105.449028] env[62684]: DEBUG nova.virt.hardware [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2105.449193] env[62684]: DEBUG nova.virt.hardware [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2105.449359] env[62684]: DEBUG nova.virt.hardware [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2105.449587] env[62684]: DEBUG nova.virt.hardware [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2105.449755] env[62684]: DEBUG nova.virt.hardware [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2105.449930] env[62684]: DEBUG nova.virt.hardware [None 
req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2105.450113] env[62684]: DEBUG nova.virt.hardware [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2105.450298] env[62684]: DEBUG nova.virt.hardware [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2105.451062] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e92bc96b-f616-44be-bba5-6bef49bfc7c5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.457032] env[62684]: DEBUG oslo_vmware.api [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]528307c3-ceb5-65ea-1160-2372756b1b87, 'name': SearchDatastore_Task, 'duration_secs': 0.009389} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2105.457664] env[62684]: DEBUG oslo_concurrency.lockutils [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2105.457928] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2105.458218] env[62684]: DEBUG oslo_concurrency.lockutils [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2105.458390] env[62684]: DEBUG oslo_concurrency.lockutils [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2105.458578] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-265efad5-303e-4039-93e3-fdd28e33fcfb 
tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2105.458830] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95111159-c273-4635-ad39-538725c0b1ef {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.463402] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b381975-f4b6-4b48-9d71-1fc331ea75ac {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.470603] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2105.470806] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2105.478777] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-837e4f73-1d45-485f-bc3d-7687e5cf1293 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.483842] env[62684]: DEBUG oslo_vmware.api [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2105.483842] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b72f8d-6cdf-5f43-e003-de73fb61a02f" [ 2105.483842] env[62684]: _type = "Task" [ 2105.483842] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2105.490912] env[62684]: DEBUG oslo_vmware.api [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b72f8d-6cdf-5f43-e003-de73fb61a02f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.594746] env[62684]: DEBUG oslo_vmware.api [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053292, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.738230] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: b009f710-1a94-4113-8feb-7cc5dd6a6519] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2105.749554] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.366s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2105.750106] env[62684]: DEBUG nova.compute.manager [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2105.753076] env[62684]: DEBUG oslo_concurrency.lockutils [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.762s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2105.754856] env[62684]: INFO nova.compute.claims [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2105.801892] env[62684]: DEBUG nova.network.neutron [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Successfully updated port: 0d755b84-8780-4295-a8a6-032192e3c6f7 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2105.879901] env[62684]: DEBUG nova.compute.manager [req-d3fa852c-e0a4-443a-8b15-ab24fdc48739 req-98c76cf5-2b60-464f-8934-1bbd84529fee service nova] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Received event network-vif-plugged-0d755b84-8780-4295-a8a6-032192e3c6f7 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2105.880200] env[62684]: DEBUG oslo_concurrency.lockutils [req-d3fa852c-e0a4-443a-8b15-ab24fdc48739 req-98c76cf5-2b60-464f-8934-1bbd84529fee service nova] Acquiring lock "8cc68353-4678-4ee7-8c0d-3d71e6bf05bf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2105.880383] env[62684]: DEBUG oslo_concurrency.lockutils [req-d3fa852c-e0a4-443a-8b15-ab24fdc48739 req-98c76cf5-2b60-464f-8934-1bbd84529fee service nova] Lock "8cc68353-4678-4ee7-8c0d-3d71e6bf05bf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2105.880560] env[62684]: DEBUG 
oslo_concurrency.lockutils [req-d3fa852c-e0a4-443a-8b15-ab24fdc48739 req-98c76cf5-2b60-464f-8934-1bbd84529fee service nova] Lock "8cc68353-4678-4ee7-8c0d-3d71e6bf05bf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2105.880730] env[62684]: DEBUG nova.compute.manager [req-d3fa852c-e0a4-443a-8b15-ab24fdc48739 req-98c76cf5-2b60-464f-8934-1bbd84529fee service nova] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] No waiting events found dispatching network-vif-plugged-0d755b84-8780-4295-a8a6-032192e3c6f7 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2105.880898] env[62684]: WARNING nova.compute.manager [req-d3fa852c-e0a4-443a-8b15-ab24fdc48739 req-98c76cf5-2b60-464f-8934-1bbd84529fee service nova] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Received unexpected event network-vif-plugged-0d755b84-8780-4295-a8a6-032192e3c6f7 for instance with vm_state building and task_state spawning. [ 2105.881070] env[62684]: DEBUG nova.compute.manager [req-d3fa852c-e0a4-443a-8b15-ab24fdc48739 req-98c76cf5-2b60-464f-8934-1bbd84529fee service nova] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Received event network-changed-0d755b84-8780-4295-a8a6-032192e3c6f7 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2105.881232] env[62684]: DEBUG nova.compute.manager [req-d3fa852c-e0a4-443a-8b15-ab24fdc48739 req-98c76cf5-2b60-464f-8934-1bbd84529fee service nova] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Refreshing instance network info cache due to event network-changed-0d755b84-8780-4295-a8a6-032192e3c6f7. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2105.881414] env[62684]: DEBUG oslo_concurrency.lockutils [req-d3fa852c-e0a4-443a-8b15-ab24fdc48739 req-98c76cf5-2b60-464f-8934-1bbd84529fee service nova] Acquiring lock "refresh_cache-8cc68353-4678-4ee7-8c0d-3d71e6bf05bf" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2105.881609] env[62684]: DEBUG oslo_concurrency.lockutils [req-d3fa852c-e0a4-443a-8b15-ab24fdc48739 req-98c76cf5-2b60-464f-8934-1bbd84529fee service nova] Acquired lock "refresh_cache-8cc68353-4678-4ee7-8c0d-3d71e6bf05bf" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2105.881707] env[62684]: DEBUG nova.network.neutron [req-d3fa852c-e0a4-443a-8b15-ab24fdc48739 req-98c76cf5-2b60-464f-8934-1bbd84529fee service nova] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Refreshing network info cache for port 0d755b84-8780-4295-a8a6-032192e3c6f7 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2105.895393] env[62684]: DEBUG nova.network.neutron [None req-a3117d4d-56c8-46b8-8969-aa71cd533258 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Updating instance_info_cache with network_info: [{"id": "1d1c0f31-e026-45f0-b3c8-5ba02555e863", "address": "fa:16:3e:42:6e:d1", "network": {"id": "bf53c8de-5f43-4a15-9911-25340615a63b", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1946277195-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": 
"fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "540d70f4b6274c38a5e79c00e389d8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6db039c-542c-4544-a57d-ddcc6c1e8e45", "external-id": "nsx-vlan-transportzone-810", "segmentation_id": 810, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d1c0f31-e0", "ovs_interfaceid": "1d1c0f31-e026-45f0-b3c8-5ba02555e863", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2105.994466] env[62684]: DEBUG oslo_vmware.api [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b72f8d-6cdf-5f43-e003-de73fb61a02f, 'name': SearchDatastore_Task, 'duration_secs': 0.042491} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2105.995265] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96cdefeb-8e1e-47cb-9bd5-15ae13b73053 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.000393] env[62684]: DEBUG oslo_vmware.api [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2106.000393] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52baea0d-0cea-5b1d-e0c6-c6d584747e99" [ 2106.000393] env[62684]: _type = "Task" [ 2106.000393] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2106.007853] env[62684]: DEBUG oslo_vmware.api [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52baea0d-0cea-5b1d-e0c6-c6d584747e99, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2106.096475] env[62684]: DEBUG oslo_vmware.api [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053292, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2106.242655] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: feca8680-4baa-4b2c-9875-69a88b351dc0] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2106.259724] env[62684]: DEBUG nova.compute.utils [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2106.264016] env[62684]: DEBUG nova.compute.manager [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2106.264203] env[62684]: DEBUG nova.network.neutron [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2106.301878] env[62684]: DEBUG nova.policy [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fe6c69e19eaa46978e4fe25513f42c0d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '359bcaa2eeb64bcbb6602062777b852e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2106.303606] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "refresh_cache-8cc68353-4678-4ee7-8c0d-3d71e6bf05bf" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2106.398685] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a3117d4d-56c8-46b8-8969-aa71cd533258 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Releasing lock "refresh_cache-025dfe36-1f14-4bda-84a0-d424364b745b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2106.413105] env[62684]: DEBUG nova.network.neutron [req-d3fa852c-e0a4-443a-8b15-ab24fdc48739 req-98c76cf5-2b60-464f-8934-1bbd84529fee service nova] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2106.483431] env[62684]: DEBUG nova.network.neutron [req-d3fa852c-e0a4-443a-8b15-ab24fdc48739 req-98c76cf5-2b60-464f-8934-1bbd84529fee service nova] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2106.515055] env[62684]: DEBUG oslo_vmware.api [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52baea0d-0cea-5b1d-e0c6-c6d584747e99, 'name': SearchDatastore_Task, 'duration_secs': 0.008839} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2106.515255] env[62684]: DEBUG oslo_concurrency.lockutils [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2106.515502] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 28886f7c-6efc-4505-84f6-682d75cea215/28886f7c-6efc-4505-84f6-682d75cea215.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2106.515776] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-107119c5-80f9-4c56-befa-d7e3ccdfd64f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.522878] env[62684]: DEBUG oslo_vmware.api [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2106.522878] env[62684]: value = "task-2053294" [ 2106.522878] env[62684]: _type = "Task" [ 2106.522878] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2106.531396] env[62684]: DEBUG oslo_vmware.api [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053294, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2106.537691] env[62684]: DEBUG nova.network.neutron [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Successfully created port: f0464f82-ef8e-43bd-a863-de4b524e43c8 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2106.596179] env[62684]: DEBUG oslo_vmware.api [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053292, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2106.747482] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: e3dd1bc0-f292-4ac7-a8db-324887a18411] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2106.767281] env[62684]: DEBUG nova.compute.manager [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2106.902480] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3117d4d-56c8-46b8-8969-aa71cd533258 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2106.902774] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5119d1f0-7e3f-4547-ad08-61d6ec31c517 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.914132] env[62684]: DEBUG oslo_vmware.api [None req-a3117d4d-56c8-46b8-8969-aa71cd533258 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 2106.914132] env[62684]: value = "task-2053295" [ 2106.914132] env[62684]: _type = "Task" [ 2106.914132] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2106.921898] env[62684]: DEBUG oslo_vmware.api [None req-a3117d4d-56c8-46b8-8969-aa71cd533258 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053295, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2106.986380] env[62684]: DEBUG oslo_concurrency.lockutils [req-d3fa852c-e0a4-443a-8b15-ab24fdc48739 req-98c76cf5-2b60-464f-8934-1bbd84529fee service nova] Releasing lock "refresh_cache-8cc68353-4678-4ee7-8c0d-3d71e6bf05bf" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2106.989392] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquired lock "refresh_cache-8cc68353-4678-4ee7-8c0d-3d71e6bf05bf" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2106.989552] env[62684]: DEBUG nova.network.neutron [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2107.034450] env[62684]: DEBUG oslo_vmware.api [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053294, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.451165} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2107.034716] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 28886f7c-6efc-4505-84f6-682d75cea215/28886f7c-6efc-4505-84f6-682d75cea215.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2107.034934] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2107.035213] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ede3c858-04b7-4dec-a1de-be1493bfbfdf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.041960] env[62684]: DEBUG oslo_vmware.api [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2107.041960] env[62684]: value = "task-2053296" [ 2107.041960] env[62684]: _type = "Task" [ 2107.041960] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2107.048557] env[62684]: DEBUG oslo_vmware.api [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053296, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2107.049971] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-876dae9d-0174-433b-ac3a-b9b6e6b5e716 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.056446] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f981fca-0774-468e-add7-082e6dc8ea03 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.087668] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ec2e63a-98b0-4bb2-b242-298040ac34eb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.100396] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e4f61a8-48a5-4dae-9040-32f60eb4cf9a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.104171] env[62684]: DEBUG oslo_vmware.api [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053292, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2107.114201] env[62684]: DEBUG nova.compute.provider_tree [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2107.251245] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 274d214a-4b92-4900-a66c-54baea2a68f8] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2107.423986] env[62684]: DEBUG oslo_vmware.api [None req-a3117d4d-56c8-46b8-8969-aa71cd533258 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053295, 'name': PowerOnVM_Task, 'duration_secs': 0.40856} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2107.424354] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3117d4d-56c8-46b8-8969-aa71cd533258 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2107.424564] env[62684]: DEBUG nova.compute.manager [None req-a3117d4d-56c8-46b8-8969-aa71cd533258 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2107.425341] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88b21fa-dd46-495b-bf56-b3f94e2f2042 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.546851] env[62684]: DEBUG nova.network.neutron [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2107.553863] env[62684]: DEBUG oslo_vmware.api [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053296, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064432} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2107.554203] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2107.554976] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96fc942a-38e2-47c7-9b11-2169265c989e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.576962] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 28886f7c-6efc-4505-84f6-682d75cea215/28886f7c-6efc-4505-84f6-682d75cea215.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2107.577249] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1165cf36-70c6-483f-98f7-21ed556e45dd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.603231] env[62684]: DEBUG oslo_vmware.api [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053292, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2107.604332] env[62684]: DEBUG oslo_vmware.api [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2107.604332] env[62684]: value = "task-2053297" [ 2107.604332] env[62684]: _type = "Task" [ 2107.604332] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2107.611466] env[62684]: DEBUG oslo_vmware.api [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053297, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2107.638082] env[62684]: ERROR nova.scheduler.client.report [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [req-8d831488-fd37-4d02-9c68-48d5b65a497d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8d831488-fd37-4d02-9c68-48d5b65a497d"}]} [ 2107.654221] env[62684]: DEBUG nova.scheduler.client.report [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2107.667465] env[62684]: DEBUG nova.scheduler.client.report [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2107.667863] env[62684]: DEBUG nova.compute.provider_tree [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2107.679329] env[62684]: DEBUG nova.scheduler.client.report [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2107.696051] env[62684]: DEBUG nova.scheduler.client.report [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2107.754815] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 8449f09b-4e7b-4511-bb3c-2ff6667addb2] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2107.776757] env[62684]: DEBUG nova.compute.manager [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2107.782730] env[62684]: DEBUG nova.network.neutron [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Updating instance_info_cache with network_info: [{"id": "0d755b84-8780-4295-a8a6-032192e3c6f7", "address": "fa:16:3e:03:1d:61", "network": {"id": "b24dd0c0-a394-4ca6-a79a-94535bc1df6f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2023102141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "607a0aa1049640d882d7dd490f5f98ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d755b84-87", "ovs_interfaceid": "0d755b84-8780-4295-a8a6-032192e3c6f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2107.808395] env[62684]: DEBUG nova.virt.hardware [None req-4ccd44c7-a338-4036-b597-28e98ab52355 
tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2107.808767] env[62684]: DEBUG nova.virt.hardware [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2107.808975] env[62684]: DEBUG nova.virt.hardware [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2107.809099] env[62684]: DEBUG nova.virt.hardware [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2107.809252] env[62684]: DEBUG nova.virt.hardware [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2107.809404] env[62684]: DEBUG nova.virt.hardware [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2107.809615] env[62684]: DEBUG nova.virt.hardware [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2107.809788] env[62684]: DEBUG nova.virt.hardware [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2107.809967] env[62684]: DEBUG nova.virt.hardware [None req-4ccd44c7-a338-4036-b597-28e98ab52355 
tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2107.810159] env[62684]: DEBUG nova.virt.hardware [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2107.810349] env[62684]: DEBUG nova.virt.hardware [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2107.811431] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d210676b-576e-4a2f-acc7-0c823b74bf3b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.819821] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c2f4648-53a1-4b30-95d1-91e8f3ebdb5b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.920041] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1206bfe8-8490-4f3c-a63e-4c442e422faf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.926045] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af783636-49a0-44c5-a98f-9f95c0f0f281 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.960515] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88deed36-a638-4f10-b53e-be806dc68c75 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.968528] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf08be50-442c-4678-b4a5-62803dd23d19 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.982261] env[62684]: DEBUG nova.compute.provider_tree [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2108.105476] env[62684]: DEBUG oslo_vmware.api [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: 
{'id': task-2053292, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2108.113432] env[62684]: DEBUG oslo_vmware.api [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053297, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2108.260202] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 02dc8c41-5092-4f84-9722-37d4df3a459a] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2108.287264] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Releasing lock "refresh_cache-8cc68353-4678-4ee7-8c0d-3d71e6bf05bf" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2108.287561] env[62684]: DEBUG nova.compute.manager [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Instance network_info: |[{"id": "0d755b84-8780-4295-a8a6-032192e3c6f7", "address": "fa:16:3e:03:1d:61", "network": {"id": "b24dd0c0-a394-4ca6-a79a-94535bc1df6f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2023102141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "607a0aa1049640d882d7dd490f5f98ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d755b84-87", "ovs_interfaceid": "0d755b84-8780-4295-a8a6-032192e3c6f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2108.288196] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:03:1d:61', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0d755b84-8780-4295-a8a6-032192e3c6f7', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2108.295667] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 
tempest-DeleteServersTestJSON-370305399-project-member] Creating folder: Project (607a0aa1049640d882d7dd490f5f98ea). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2108.295986] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7d8aa679-2343-4489-94f0-22d9ed159250 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.307430] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Created folder: Project (607a0aa1049640d882d7dd490f5f98ea) in parent group-v421118. [ 2108.307621] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Creating folder: Instances. Parent ref: group-v421343. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2108.307894] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-30265c9c-96a4-43de-a660-8b59d9cf1022 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.316907] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Created folder: Instances in parent group-v421343. [ 2108.317169] env[62684]: DEBUG oslo.service.loopingcall [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2108.317370] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2108.317632] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-beb4ad10-5298-421c-92ae-0aebd414e912 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.339655] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2108.339655] env[62684]: value = "task-2053300" [ 2108.339655] env[62684]: _type = "Task" [ 2108.339655] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2108.346733] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053300, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2108.513462] env[62684]: DEBUG nova.scheduler.client.report [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 121 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2108.513813] env[62684]: DEBUG nova.compute.provider_tree [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 121 to 122 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2108.514065] env[62684]: DEBUG nova.compute.provider_tree [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2108.604848] env[62684]: DEBUG oslo_vmware.api [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053292, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2108.613785] env[62684]: DEBUG oslo_vmware.api [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053297, 'name': ReconfigVM_Task, 'duration_secs': 0.920366} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2108.613785] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 28886f7c-6efc-4505-84f6-682d75cea215/28886f7c-6efc-4505-84f6-682d75cea215.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2108.614345] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ad46edc2-4550-4a22-ba55-18f45ff582a3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.620314] env[62684]: DEBUG oslo_vmware.api [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2108.620314] env[62684]: value = "task-2053301" [ 2108.620314] env[62684]: _type = "Task" [ 2108.620314] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2108.627528] env[62684]: DEBUG oslo_vmware.api [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053301, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2108.764131] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 26303c0e-be87-41ff-a15c-e92f91f8a05f] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2108.849373] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053300, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.019557] env[62684]: DEBUG oslo_concurrency.lockutils [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.266s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2109.020133] env[62684]: DEBUG nova.compute.manager [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2109.022802] env[62684]: DEBUG oslo_concurrency.lockutils [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.185s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2109.023502] env[62684]: DEBUG nova.objects.instance [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Lazy-loading 'resources' on Instance uuid 9964237b-db9b-49cc-a9bd-d62329ea564e {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2109.105495] env[62684]: DEBUG oslo_vmware.api [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053292, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.129692] env[62684]: DEBUG oslo_vmware.api [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053301, 'name': Rename_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.268085] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 50bc9674-d19c-40f1-a89f-1738a1e48307] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2109.351273] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053300, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.526788] env[62684]: DEBUG nova.compute.utils [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2109.528643] env[62684]: DEBUG nova.compute.manager [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2109.528643] env[62684]: DEBUG nova.network.neutron [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2109.594195] env[62684]: DEBUG nova.policy [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0f3a4c8387a64e32947880017d2abeb6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43304d5c52344bd9841dbc760a174b4f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2109.607286] env[62684]: DEBUG oslo_vmware.api [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053292, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.630766] env[62684]: DEBUG oslo_vmware.api [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053301, 'name': Rename_Task, 'duration_secs': 0.967999} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2109.630766] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2109.631035] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b00d87c9-a72f-49b7-84c3-58cd5e23dbac {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.639317] env[62684]: DEBUG oslo_vmware.api [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2109.639317] env[62684]: value = "task-2053302" [ 2109.639317] env[62684]: _type = "Task" [ 2109.639317] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2109.648203] env[62684]: DEBUG oslo_vmware.api [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053302, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.771422] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: b945f05d-ef1c-4469-9390-f7bbd4f435f0] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2109.787946] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ed24e91-7f91-4d19-abdc-1914af296049 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.796129] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28550995-ff1d-40ee-a667-999f798b07c1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.828899] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caaf96ba-0e22-43ad-b2d3-f226c08c68de {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.838613] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e878906-9db4-4f35-a6f9-11d21db59e7f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.850549] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053300, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.858690] env[62684]: DEBUG nova.compute.provider_tree [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2109.976271] env[62684]: DEBUG nova.network.neutron [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Successfully created port: c12d6728-00ee-47e7-9fa8-92384e9f7a3c {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2110.034318] env[62684]: DEBUG nova.compute.manager [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2110.110636] env[62684]: DEBUG oslo_vmware.api [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053292, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.150056] env[62684]: DEBUG oslo_vmware.api [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053302, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.279039] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 5237ffbf-9ec2-40d7-8ab7-fb66d856d6e7] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2110.354080] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053300, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.362095] env[62684]: DEBUG nova.scheduler.client.report [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2110.608919] env[62684]: DEBUG oslo_vmware.api [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053292, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.653480] env[62684]: DEBUG oslo_vmware.api [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053302, 'name': PowerOnVM_Task, 'duration_secs': 0.686452} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2110.653891] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2110.654263] env[62684]: INFO nova.compute.manager [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Took 9.52 seconds to spawn the instance on the hypervisor. 
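The set_inventory_for_provider payload logged above fixes the schedulable capacity for provider c23c281e-ec1f-4876-972e-a98655f2084f: Placement treats each resource class as (total - reserved) * allocation_ratio usable units, with max_unit capping any single allocation. A minimal Python sketch of that arithmetic using the exact figures from the log entries above; the capacity() helper is illustrative only and is not part of Nova or Placement:

    # Capacity math implied by the inventory reported above; illustrative only.
    INVENTORY = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 154},
    }

    def capacity(inv):
        # Effective schedulable units: (total - reserved) * allocation_ratio.
        return int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])

    for rc, inv in INVENTORY.items():
        # VCPU: (48 - 0) * 4.0 = 192; MEMORY_MB: (196590 - 512) * 1.0 = 196078; DISK_GB: 400.
        print(f"{rc}: {capacity(inv)} schedulable, at most {inv['max_unit']} per single allocation")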
[ 2110.654591] env[62684]: DEBUG nova.compute.manager [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2110.655777] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de287f0a-1423-4666-818c-7a872747d863 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.780795] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: c6dc5401-f59e-4c18-9553-1240e2f49bce] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2110.853241] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053300, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.867184] env[62684]: DEBUG oslo_concurrency.lockutils [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.844s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2110.869572] env[62684]: DEBUG oslo_concurrency.lockutils [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 14.851s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2110.869962] env[62684]: DEBUG nova.objects.instance [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62684) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2110.892875] env[62684]: INFO nova.scheduler.client.report [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Deleted allocations for instance 9964237b-db9b-49cc-a9bd-d62329ea564e [ 2111.046659] env[62684]: DEBUG nova.compute.manager [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2111.074829] env[62684]: DEBUG nova.virt.hardware [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2111.075127] env[62684]: DEBUG nova.virt.hardware [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2111.075203] env[62684]: DEBUG nova.virt.hardware [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2111.075394] env[62684]: DEBUG nova.virt.hardware [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2111.075543] env[62684]: DEBUG nova.virt.hardware [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2111.075768] env[62684]: DEBUG nova.virt.hardware [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2111.075990] env[62684]: DEBUG nova.virt.hardware [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2111.076354] env[62684]: DEBUG nova.virt.hardware [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2111.076540] env[62684]: DEBUG nova.virt.hardware [None 
req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2111.076714] env[62684]: DEBUG nova.virt.hardware [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2111.076923] env[62684]: DEBUG nova.virt.hardware [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2111.077810] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c61a89c8-271b-418f-8ad6-59f035f15c5f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.085624] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2128a402-dc7e-415a-97ef-788b6e925656 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.108056] env[62684]: DEBUG oslo_vmware.api [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053292, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.173791] env[62684]: INFO nova.compute.manager [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Took 23.59 seconds to build instance. [ 2111.283657] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: cfe219da-adf9-44b9-9df3-752ccf72a68b] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2111.353719] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053300, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.399822] env[62684]: DEBUG oslo_concurrency.lockutils [None req-631ae091-e487-4050-acd5-c5d701be3097 tempest-ServerShowV247Test-1709862823 tempest-ServerShowV247Test-1709862823-project-member] Lock "9964237b-db9b-49cc-a9bd-d62329ea564e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.300s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2111.610440] env[62684]: DEBUG oslo_vmware.api [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053292, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.675979] env[62684]: DEBUG oslo_concurrency.lockutils [None req-265efad5-303e-4039-93e3-fdd28e33fcfb tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "28886f7c-6efc-4505-84f6-682d75cea215" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.106s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2111.787267] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: b4cd871a-30ea-4b7a-98ad-00b8676dc2cd] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2111.854592] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053300, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.878588] env[62684]: DEBUG oslo_concurrency.lockutils [None req-79e7c8d0-b533-4ccf-80ef-6d909c0a10fe tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2111.879923] env[62684]: DEBUG oslo_concurrency.lockutils [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.603s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2111.881778] env[62684]: INFO nova.compute.claims [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2112.110913] env[62684]: DEBUG oslo_vmware.api [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053292, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2112.241521] env[62684]: INFO nova.compute.manager [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Rebuilding instance [ 2112.285926] env[62684]: DEBUG nova.compute.manager [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2112.286808] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f789f9-9635-4ea6-b29a-793f1e94d0f4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.289730] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 4a15d298-115f-4132-8be0-00e623fa21d8] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2112.354636] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053300, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2112.611641] env[62684]: DEBUG oslo_vmware.api [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053292, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2112.793050] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: e2a9ab56-bde3-40b6-a214-19c77a9c6778] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2112.799895] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2112.800113] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b3b41734-9e73-47a2-953b-b87cc090cb55 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.807155] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2112.807155] env[62684]: value = "task-2053303" [ 2112.807155] env[62684]: _type = "Task" [ 2112.807155] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2112.815895] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053303, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2112.855930] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053300, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2113.122568] env[62684]: DEBUG oslo_vmware.api [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053292, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2113.125092] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa3fc71d-7e01-4bef-8ed9-6dd6aa6ecb54 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.132569] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-781a2a5f-6938-46bb-97af-c3bfdd9a8da4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.161482] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f71428cd-f0b7-44d0-a9f9-bcaa30ccc5fa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.168382] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73760074-35b3-401b-8b86-0a8889c533d1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.180911] env[62684]: DEBUG nova.compute.provider_tree [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2113.296413] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: d06f3099-d05f-417f-a71a-7b368590624f] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2113.316609] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053303, 'name': PowerOffVM_Task, 'duration_secs': 0.191706} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2113.316851] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2113.317051] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2113.318438] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62476314-6d93-459a-a3f4-d8c0968f40ba {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.325433] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2113.325712] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f7643937-fba8-4ebb-8481-260a3da9f9e8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.355660] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053300, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2113.612962] env[62684]: DEBUG oslo_vmware.api [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053292, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2113.684172] env[62684]: DEBUG nova.scheduler.client.report [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2113.799994] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: b788c51b-367b-4eef-93d2-faa8836469b6] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2113.856703] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053300, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2113.915291] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2113.915521] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2113.915756] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Deleting the datastore file [datastore2] 28886f7c-6efc-4505-84f6-682d75cea215 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2113.916078] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-38e5739d-47c2-456a-a54b-a6df806d6419 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.923467] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2113.923467] env[62684]: value = "task-2053305" [ 2113.923467] env[62684]: _type = "Task" [ 2113.923467] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2113.939531] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053305, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2114.113148] env[62684]: DEBUG oslo_vmware.api [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053292, 'name': ReconfigVM_Task, 'duration_secs': 10.516317} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2114.113409] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2114.113607] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Reconfigured VM to detach interface {{(pid=62684) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 2114.188741] env[62684]: DEBUG oslo_concurrency.lockutils [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.309s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2114.189330] env[62684]: DEBUG nova.compute.manager [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2114.192020] env[62684]: DEBUG oslo_concurrency.lockutils [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.646s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2114.192320] env[62684]: DEBUG nova.objects.instance [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Lazy-loading 'resources' on Instance uuid 4cf48f05-d643-47e6-9a0b-33415d80890c {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2114.302983] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: e08f8636-5193-40fa-972c-f0ecab193fc1] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2114.344447] env[62684]: DEBUG nova.compute.manager [req-ab45b54d-6868-4c57-8bc8-0a4daf9d683e req-23c7e67b-7b66-4cdb-b193-0c13506ae2e1 service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Received event network-vif-deleted-ff6434b2-d91e-43cc-b6f8-03cec921c38d {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2114.344658] env[62684]: INFO nova.compute.manager [req-ab45b54d-6868-4c57-8bc8-0a4daf9d683e req-23c7e67b-7b66-4cdb-b193-0c13506ae2e1 service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Neutron deleted interface ff6434b2-d91e-43cc-b6f8-03cec921c38d; detaching it from the instance and deleting it from the info cache [ 2114.344954] env[62684]: DEBUG nova.network.neutron [req-ab45b54d-6868-4c57-8bc8-0a4daf9d683e req-23c7e67b-7b66-4cdb-b193-0c13506ae2e1 service nova] 
[instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Updating instance_info_cache with network_info: [{"id": "fafc2062-9754-4ce0-8647-362b6bb8f8d7", "address": "fa:16:3e:a1:7d:89", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfafc2062-97", "ovs_interfaceid": "fafc2062-9754-4ce0-8647-362b6bb8f8d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5f8a8ef1-7f32-48b8-9444-b5fc1ab25e68", "address": "fa:16:3e:f0:7c:90", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f8a8ef1-7f", "ovs_interfaceid": "5f8a8ef1-7f32-48b8-9444-b5fc1ab25e68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2114.358526] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053300, 'name': CreateVM_Task, 'duration_secs': 5.831301} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2114.359243] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2114.359933] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2114.360129] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2114.360461] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2114.360935] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb2d9f59-614b-4b25-92cf-b0043b3dedfb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.365818] env[62684]: DEBUG oslo_vmware.api [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2114.365818] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52293f54-fa96-d091-3376-3b1d8a4f4ada" [ 2114.365818] env[62684]: _type = "Task" [ 2114.365818] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2114.373930] env[62684]: DEBUG oslo_vmware.api [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52293f54-fa96-d091-3376-3b1d8a4f4ada, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2114.433824] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053305, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160334} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2114.434289] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2114.434626] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2114.434931] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2114.695506] env[62684]: DEBUG nova.compute.utils [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2114.700053] env[62684]: DEBUG nova.compute.manager [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2114.700244] env[62684]: DEBUG nova.network.neutron [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2114.800614] env[62684]: DEBUG nova.policy [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '659966560e00489a8845cf31cb3eaac4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '70dbc7d95c2e4e23b1748be1626087c0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2114.806172] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 52839b18-a68a-4ec7-a921-c42454955e82] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2114.847605] env[62684]: DEBUG oslo_concurrency.lockutils [req-ab45b54d-6868-4c57-8bc8-0a4daf9d683e req-23c7e67b-7b66-4cdb-b193-0c13506ae2e1 service nova] Acquiring lock "81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} 
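The lock tracing in the surrounding entries ('Acquiring lock', 'acquired by ... :: waited Ns', '"released" by ... :: held Ns') is emitted by oslo.concurrency's lockutils helpers. A rough sketch of the two usage patterns that produce such lines, offered as an illustration rather than Nova's actual code; the function body and lock names below are placeholders:

    from oslo_concurrency import lockutils

    # Decorator form: the 'acquired by ... :: waited' / '"released" by ... :: held'
    # DEBUG lines above come from the wrapper this decorator installs (the 'inner'
    # frames at lockutils.py:402/407/421 in the log).
    @lockutils.synchronized('compute_resources')
    def update_usage_example():
        pass  # critical section guarded by the 'compute_resources' semaphore

    # Context-manager form: produces the plain 'Acquiring lock' / 'Releasing lock'
    # lines (the lock frames at lockutils.py:310/331), as seen around the
    # per-instance UUID lock in the entries above.
    with lockutils.lock('81b7949d-be24-46c9-8dc8-c249b65bb039'):
        pass  # work done while holding the per-instance lock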
[ 2114.847860] env[62684]: DEBUG oslo_concurrency.lockutils [req-ab45b54d-6868-4c57-8bc8-0a4daf9d683e req-23c7e67b-7b66-4cdb-b193-0c13506ae2e1 service nova] Acquired lock "81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2114.849053] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0db8ba22-5592-46de-9791-f511bd7a6af4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.872961] env[62684]: DEBUG oslo_concurrency.lockutils [req-ab45b54d-6868-4c57-8bc8-0a4daf9d683e req-23c7e67b-7b66-4cdb-b193-0c13506ae2e1 service nova] Releasing lock "81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2114.873288] env[62684]: WARNING nova.compute.manager [req-ab45b54d-6868-4c57-8bc8-0a4daf9d683e req-23c7e67b-7b66-4cdb-b193-0c13506ae2e1 service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Detach interface failed, port_id=ff6434b2-d91e-43cc-b6f8-03cec921c38d, reason: No device with interface-id ff6434b2-d91e-43cc-b6f8-03cec921c38d exists on VM: nova.exception.NotFound: No device with interface-id ff6434b2-d91e-43cc-b6f8-03cec921c38d exists on VM [ 2114.885360] env[62684]: DEBUG oslo_vmware.api [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52293f54-fa96-d091-3376-3b1d8a4f4ada, 'name': SearchDatastore_Task, 'duration_secs': 0.00863} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2114.886439] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2114.886439] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2114.886733] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2114.886847] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2114.886920] env[62684]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2114.887236] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-df71477e-ba2e-414c-8761-2985d1689de5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.898180] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2114.898382] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2114.899167] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b9adb17-8784-4f36-b8b3-e4fcd8986ed9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.905645] env[62684]: DEBUG oslo_vmware.api [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2114.905645] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527e1f63-2b78-100a-452b-72e47194e80b" [ 2114.905645] env[62684]: _type = "Task" [ 2114.905645] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2114.916551] env[62684]: DEBUG oslo_vmware.api [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527e1f63-2b78-100a-452b-72e47194e80b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2115.012787] env[62684]: DEBUG nova.compute.manager [req-0346a4aa-ae5f-4a4b-9f3a-f187c9b3a61f req-bdb149a8-4c80-4319-a94f-8233c7a2de61 service nova] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Received event network-vif-plugged-f0464f82-ef8e-43bd-a863-de4b524e43c8 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2115.013031] env[62684]: DEBUG oslo_concurrency.lockutils [req-0346a4aa-ae5f-4a4b-9f3a-f187c9b3a61f req-bdb149a8-4c80-4319-a94f-8233c7a2de61 service nova] Acquiring lock "e8c90faa-2c25-4308-9781-80d308b9211c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2115.013258] env[62684]: DEBUG oslo_concurrency.lockutils [req-0346a4aa-ae5f-4a4b-9f3a-f187c9b3a61f req-bdb149a8-4c80-4319-a94f-8233c7a2de61 service nova] Lock "e8c90faa-2c25-4308-9781-80d308b9211c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2115.013435] env[62684]: DEBUG oslo_concurrency.lockutils [req-0346a4aa-ae5f-4a4b-9f3a-f187c9b3a61f req-bdb149a8-4c80-4319-a94f-8233c7a2de61 service nova] Lock "e8c90faa-2c25-4308-9781-80d308b9211c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2115.013610] env[62684]: DEBUG nova.compute.manager [req-0346a4aa-ae5f-4a4b-9f3a-f187c9b3a61f req-bdb149a8-4c80-4319-a94f-8233c7a2de61 service nova] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] No waiting events found dispatching network-vif-plugged-f0464f82-ef8e-43bd-a863-de4b524e43c8 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2115.013849] env[62684]: WARNING nova.compute.manager [req-0346a4aa-ae5f-4a4b-9f3a-f187c9b3a61f req-bdb149a8-4c80-4319-a94f-8233c7a2de61 service nova] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Received unexpected event network-vif-plugged-f0464f82-ef8e-43bd-a863-de4b524e43c8 for instance with vm_state building and task_state spawning. 
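Note (editorial sketch, not part of the captured log): the recurring "Acquiring lock … acquired … "released"" triplets above, including the per-instance "<uuid>-events" lock taken by pop_instance_event before the "No waiting events found" warning, come from oslo.concurrency's named in-process locks. A minimal, hypothetical illustration of that pattern (the helper and its arguments are invented for clarity, not Nova source):

from oslo_concurrency import lockutils

def _pop_event(events, instance_uuid, event_name):
    # lockutils.lock() is a context manager around a named in-process lock
    # (file-based if external=True); the DEBUG acquire/release entries seen
    # in this log are emitted by that module while the block runs.
    with lockutils.lock(f"{instance_uuid}-events"):
        # Return and remove any waiter registered for this event; a None
        # result corresponds to "No waiting events found" and the caller
        # then logs the "Received unexpected event" WARNING.
        return events.get(instance_uuid, {}).pop(event_name, None)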
[ 2115.046405] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccd802b7-8a7f-4ea4-b2f1-490a60ec10a6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.056056] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47271a46-1a14-4a0f-94d1-9917e3fbdf2f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.088895] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac3341e-a552-4654-9fe4-bac60b8e5f4d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.097069] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-265dae45-6203-41af-a919-1289a4f240f6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.102020] env[62684]: DEBUG nova.network.neutron [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Successfully updated port: f0464f82-ef8e-43bd-a863-de4b524e43c8 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2115.119580] env[62684]: DEBUG nova.compute.provider_tree [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2115.201164] env[62684]: DEBUG nova.compute.manager [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2115.313178] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 6d4061e4-a074-445d-95c5-239014ee87f3] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2115.322603] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "refresh_cache-81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2115.322806] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "refresh_cache-81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2115.322989] env[62684]: DEBUG nova.network.neutron [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2115.416115] env[62684]: DEBUG oslo_vmware.api [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527e1f63-2b78-100a-452b-72e47194e80b, 'name': SearchDatastore_Task, 'duration_secs': 0.009214} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2115.416857] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bad8ddcf-3713-431f-b09e-875377f6675b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.421995] env[62684]: DEBUG oslo_vmware.api [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2115.421995] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ba7410-82a7-1d40-edfa-a41fc786769a" [ 2115.421995] env[62684]: _type = "Task" [ 2115.421995] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.429443] env[62684]: DEBUG oslo_vmware.api [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ba7410-82a7-1d40-edfa-a41fc786769a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2115.459044] env[62684]: DEBUG nova.network.neutron [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Successfully created port: f01f26d4-f4bb-4d9a-b6dd-3451ffb353bb {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2115.472031] env[62684]: DEBUG nova.virt.hardware [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2115.472300] env[62684]: DEBUG nova.virt.hardware [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2115.472463] env[62684]: DEBUG nova.virt.hardware [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2115.472657] env[62684]: DEBUG nova.virt.hardware [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2115.472845] env[62684]: DEBUG nova.virt.hardware [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2115.473134] env[62684]: DEBUG nova.virt.hardware [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2115.473428] env[62684]: DEBUG nova.virt.hardware [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2115.473664] env[62684]: DEBUG nova.virt.hardware [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2115.473918] env[62684]: DEBUG nova.virt.hardware [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2115.474192] env[62684]: DEBUG nova.virt.hardware [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2115.474437] env[62684]: DEBUG nova.virt.hardware [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2115.475651] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dae4002-4b0b-4a9f-9e55-a6d95e1773bd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.486151] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d18df5b-3a93-4267-bf0d-896cb92d9f37 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.506939] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:9d:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'de5fcb06-b0d0-467f-86fe-06882165ac31', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8d2957b9-747d-4fea-8047-0980e3548bb2', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2115.515419] env[62684]: DEBUG oslo.service.loopingcall [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2115.515930] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2115.516303] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d5558328-52f4-49b1-bf81-d60e7b9f457f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.536981] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2115.536981] env[62684]: value = "task-2053306" [ 2115.536981] env[62684]: _type = "Task" [ 2115.536981] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.545592] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053306, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2115.574450] env[62684]: DEBUG nova.network.neutron [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Successfully updated port: c12d6728-00ee-47e7-9fa8-92384e9f7a3c {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2115.606883] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquiring lock "refresh_cache-e8c90faa-2c25-4308-9781-80d308b9211c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2115.606883] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquired lock "refresh_cache-e8c90faa-2c25-4308-9781-80d308b9211c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2115.606883] env[62684]: DEBUG nova.network.neutron [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2115.663022] env[62684]: DEBUG nova.scheduler.client.report [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 122 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2115.663022] env[62684]: DEBUG nova.compute.provider_tree [None 
req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 122 to 123 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2115.663022] env[62684]: DEBUG nova.compute.provider_tree [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2115.815439] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 3a172e9f-9f79-489e-9571-80bd74ad8609] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2115.934120] env[62684]: DEBUG oslo_vmware.api [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ba7410-82a7-1d40-edfa-a41fc786769a, 'name': SearchDatastore_Task, 'duration_secs': 0.010171} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2115.937168] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2115.937579] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf/8cc68353-4678-4ee7-8c0d-3d71e6bf05bf.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2115.938206] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7e45b83f-2418-47f6-99cb-522026d5b23d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.949022] env[62684]: DEBUG oslo_vmware.api [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2115.949022] env[62684]: value = "task-2053307" [ 2115.949022] env[62684]: _type = "Task" [ 2115.949022] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.956069] env[62684]: DEBUG oslo_vmware.api [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053307, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.053391] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053306, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.062162] env[62684]: DEBUG oslo_concurrency.lockutils [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "81b7949d-be24-46c9-8dc8-c249b65bb039" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2116.062571] env[62684]: DEBUG oslo_concurrency.lockutils [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "81b7949d-be24-46c9-8dc8-c249b65bb039" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2116.062858] env[62684]: DEBUG oslo_concurrency.lockutils [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "81b7949d-be24-46c9-8dc8-c249b65bb039-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2116.063128] env[62684]: DEBUG oslo_concurrency.lockutils [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "81b7949d-be24-46c9-8dc8-c249b65bb039-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2116.063684] env[62684]: DEBUG oslo_concurrency.lockutils [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "81b7949d-be24-46c9-8dc8-c249b65bb039-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2116.065788] env[62684]: INFO nova.compute.manager [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Terminating instance [ 2116.067935] env[62684]: DEBUG nova.compute.manager [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Start destroying 
the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2116.068232] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2116.069153] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f6fd618-2e73-466c-a859-47a7afd0c190 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.077394] env[62684]: DEBUG oslo_concurrency.lockutils [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquiring lock "refresh_cache-3a967adf-8c46-4787-b1d1-4ed701399576" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2116.077719] env[62684]: DEBUG oslo_concurrency.lockutils [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquired lock "refresh_cache-3a967adf-8c46-4787-b1d1-4ed701399576" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2116.077978] env[62684]: DEBUG nova.network.neutron [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2116.082789] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2116.082789] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ffdba761-e8b4-4168-9b97-bfc0ac53f018 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.094451] env[62684]: DEBUG oslo_vmware.api [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2116.094451] env[62684]: value = "task-2053308" [ 2116.094451] env[62684]: _type = "Task" [ 2116.094451] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2116.106342] env[62684]: DEBUG oslo_vmware.api [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053308, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.147541] env[62684]: DEBUG nova.network.neutron [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2116.168015] env[62684]: DEBUG oslo_concurrency.lockutils [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.976s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2116.170699] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.366s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2116.171945] env[62684]: DEBUG nova.objects.instance [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lazy-loading 'resources' on Instance uuid 23578214-6708-43ae-88ce-56212083532a {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2116.193993] env[62684]: INFO nova.scheduler.client.report [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Deleted allocations for instance 4cf48f05-d643-47e6-9a0b-33415d80890c [ 2116.211815] env[62684]: DEBUG nova.compute.manager [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2116.248524] env[62684]: DEBUG nova.virt.hardware [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2116.248897] env[62684]: DEBUG nova.virt.hardware [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2116.249099] env[62684]: DEBUG nova.virt.hardware [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2116.249312] env[62684]: DEBUG nova.virt.hardware [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2116.249468] env[62684]: DEBUG nova.virt.hardware [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2116.249627] env[62684]: DEBUG nova.virt.hardware [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2116.249847] env[62684]: DEBUG nova.virt.hardware [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2116.250025] env[62684]: DEBUG nova.virt.hardware [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2116.250219] env[62684]: DEBUG 
nova.virt.hardware [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2116.250389] env[62684]: DEBUG nova.virt.hardware [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2116.250620] env[62684]: DEBUG nova.virt.hardware [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2116.252021] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3099e5d-df43-4024-85f3-72bd2850a35a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.263478] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d532323c-1692-4bd4-bb16-e48e17f8d1b2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.319651] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: dfe40a8c-61d6-4c60-afd3-0defb61c4308] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2116.347433] env[62684]: INFO nova.network.neutron [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Port 5f8a8ef1-7f32-48b8-9444-b5fc1ab25e68 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
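Note (illustrative sketch, not from the captured run): the repeated "Waiting for the task … progress is N% … completed successfully" entries follow oslo.vmware's task pattern — a vSphere *_Task method returns a task reference, and the API session polls it until it reaches a terminal state. Roughly, with placeholder host/credentials and an assumed vm_ref obtained elsewhere:

from oslo_vmware import api as vmware_api

def power_off_vm(host, user, password, vm_ref):
    # Opens a vCenter session; session creation and the subsequent task
    # polling are what produce the oslo_vmware.api entries in this log.
    session = vmware_api.VMwareAPISession(host, user, password,
                                          api_retry_count=10,
                                          task_poll_interval=0.5)
    # Start the task through the SOAP client, then block while it is
    # polled; wait_for_task() raises on failure and returns on success.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

The same wait loop underlies the CreateVM_Task, CopyVirtualDisk_Task, PowerOffVM_Task and SearchDatastore_Task calls polled throughout this window.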
[ 2116.347532] env[62684]: DEBUG nova.network.neutron [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Updating instance_info_cache with network_info: [{"id": "fafc2062-9754-4ce0-8647-362b6bb8f8d7", "address": "fa:16:3e:a1:7d:89", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfafc2062-97", "ovs_interfaceid": "fafc2062-9754-4ce0-8647-362b6bb8f8d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2116.356546] env[62684]: DEBUG nova.network.neutron [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Updating instance_info_cache with network_info: [{"id": "f0464f82-ef8e-43bd-a863-de4b524e43c8", "address": "fa:16:3e:3a:6a:da", "network": {"id": "eb5671f2-a648-452e-a4c0-e81eb932b49c", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-524881696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "359bcaa2eeb64bcbb6602062777b852e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "001929c7-0dc4-4b73-a9f1-d672f8377985", "external-id": "nsx-vlan-transportzone-230", "segmentation_id": 230, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0464f82-ef", "ovs_interfaceid": "f0464f82-ef8e-43bd-a863-de4b524e43c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2116.384460] env[62684]: DEBUG nova.compute.manager [req-fe34ce92-af45-48f4-8dd0-d4be2c63cda3 req-c3ea5233-1856-4f37-9584-bfc2d82f088c service nova] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Received event network-vif-plugged-c12d6728-00ee-47e7-9fa8-92384e9f7a3c {{(pid=62684) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2116.385064] env[62684]: DEBUG oslo_concurrency.lockutils [req-fe34ce92-af45-48f4-8dd0-d4be2c63cda3 req-c3ea5233-1856-4f37-9584-bfc2d82f088c service nova] Acquiring lock "3a967adf-8c46-4787-b1d1-4ed701399576-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2116.385064] env[62684]: DEBUG oslo_concurrency.lockutils [req-fe34ce92-af45-48f4-8dd0-d4be2c63cda3 req-c3ea5233-1856-4f37-9584-bfc2d82f088c service nova] Lock "3a967adf-8c46-4787-b1d1-4ed701399576-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2116.385446] env[62684]: DEBUG oslo_concurrency.lockutils [req-fe34ce92-af45-48f4-8dd0-d4be2c63cda3 req-c3ea5233-1856-4f37-9584-bfc2d82f088c service nova] Lock "3a967adf-8c46-4787-b1d1-4ed701399576-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2116.385446] env[62684]: DEBUG nova.compute.manager [req-fe34ce92-af45-48f4-8dd0-d4be2c63cda3 req-c3ea5233-1856-4f37-9584-bfc2d82f088c service nova] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] No waiting events found dispatching network-vif-plugged-c12d6728-00ee-47e7-9fa8-92384e9f7a3c {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2116.385692] env[62684]: WARNING nova.compute.manager [req-fe34ce92-af45-48f4-8dd0-d4be2c63cda3 req-c3ea5233-1856-4f37-9584-bfc2d82f088c service nova] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Received unexpected event network-vif-plugged-c12d6728-00ee-47e7-9fa8-92384e9f7a3c for instance with vm_state building and task_state spawning. [ 2116.385874] env[62684]: DEBUG nova.compute.manager [req-fe34ce92-af45-48f4-8dd0-d4be2c63cda3 req-c3ea5233-1856-4f37-9584-bfc2d82f088c service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Received event network-vif-deleted-5f8a8ef1-7f32-48b8-9444-b5fc1ab25e68 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2116.386114] env[62684]: DEBUG nova.compute.manager [req-fe34ce92-af45-48f4-8dd0-d4be2c63cda3 req-c3ea5233-1856-4f37-9584-bfc2d82f088c service nova] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Received event network-changed-c12d6728-00ee-47e7-9fa8-92384e9f7a3c {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2116.386299] env[62684]: DEBUG nova.compute.manager [req-fe34ce92-af45-48f4-8dd0-d4be2c63cda3 req-c3ea5233-1856-4f37-9584-bfc2d82f088c service nova] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Refreshing instance network info cache due to event network-changed-c12d6728-00ee-47e7-9fa8-92384e9f7a3c. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2116.386480] env[62684]: DEBUG oslo_concurrency.lockutils [req-fe34ce92-af45-48f4-8dd0-d4be2c63cda3 req-c3ea5233-1856-4f37-9584-bfc2d82f088c service nova] Acquiring lock "refresh_cache-3a967adf-8c46-4787-b1d1-4ed701399576" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2116.458714] env[62684]: DEBUG oslo_vmware.api [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053307, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.547985] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053306, 'name': CreateVM_Task, 'duration_secs': 0.65318} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2116.548166] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2116.548892] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2116.549097] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2116.549427] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2116.549683] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9629fbdc-9c18-4adc-bce7-a9f1b5591946 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.554080] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2116.554080] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ea8dc9-43c6-d4df-f042-06d7975fc56e" [ 2116.554080] env[62684]: _type = "Task" [ 2116.554080] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2116.561550] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ea8dc9-43c6-d4df-f042-06d7975fc56e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.603893] env[62684]: DEBUG oslo_vmware.api [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053308, 'name': PowerOffVM_Task, 'duration_secs': 0.319492} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2116.604181] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2116.604357] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2116.604610] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b91991f-6d9b-4be8-a349-acec09f6396e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.616618] env[62684]: DEBUG nova.network.neutron [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2116.703506] env[62684]: DEBUG oslo_concurrency.lockutils [None req-acf55903-594a-4118-9746-2b073b6156af tempest-ServerShowV254Test-411536117 tempest-ServerShowV254Test-411536117-project-member] Lock "4cf48f05-d643-47e6-9a0b-33415d80890c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.867s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2116.744264] env[62684]: DEBUG nova.network.neutron [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Updating instance_info_cache with network_info: [{"id": "c12d6728-00ee-47e7-9fa8-92384e9f7a3c", "address": "fa:16:3e:50:c8:42", "network": {"id": "8eebb0b3-51e4-44c0-a4b4-b45647332a9e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-398889609-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "43304d5c52344bd9841dbc760a174b4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc12d6728-00", "ovs_interfaceid": "c12d6728-00ee-47e7-9fa8-92384e9f7a3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2116.824458] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: f44b2e88-af6d-4252-b562-9d5fa7745b56] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2116.851023] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "refresh_cache-81b7949d-be24-46c9-8dc8-c249b65bb039" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2116.860353] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Releasing lock "refresh_cache-e8c90faa-2c25-4308-9781-80d308b9211c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2116.860677] env[62684]: DEBUG nova.compute.manager [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Instance network_info: |[{"id": "f0464f82-ef8e-43bd-a863-de4b524e43c8", "address": "fa:16:3e:3a:6a:da", "network": {"id": 
"eb5671f2-a648-452e-a4c0-e81eb932b49c", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-524881696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "359bcaa2eeb64bcbb6602062777b852e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "001929c7-0dc4-4b73-a9f1-d672f8377985", "external-id": "nsx-vlan-transportzone-230", "segmentation_id": 230, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0464f82-ef", "ovs_interfaceid": "f0464f82-ef8e-43bd-a863-de4b524e43c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2116.861773] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3a:6a:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '001929c7-0dc4-4b73-a9f1-d672f8377985', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f0464f82-ef8e-43bd-a863-de4b524e43c8', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2116.869230] env[62684]: DEBUG oslo.service.loopingcall [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2116.870127] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2116.870127] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ad18a76-a50c-4c2a-b2b3-6b38b7487995 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.894714] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2116.894714] env[62684]: value = "task-2053310" [ 2116.894714] env[62684]: _type = "Task" [ 2116.894714] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2116.905354] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053310, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.910478] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794e137e-342b-4e9b-bc1a-acb0590a4e5c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.916721] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c81fe35-b74f-44d3-9b89-fa3af59439c8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.954430] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a69bd8-11bd-4ba0-8215-e2b5a53e9ba9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.962373] env[62684]: DEBUG oslo_vmware.api [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053307, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.965492] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0847ef0-4ecc-4fa7-a21e-80b79fed2fa3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.980016] env[62684]: DEBUG nova.compute.provider_tree [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2117.050256] env[62684]: DEBUG nova.compute.manager [req-54c3feb8-3325-4bd2-a090-35b5b4126ac8 req-f498332b-6c24-49b2-95cb-2d463c06d834 service nova] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Received event network-changed-f0464f82-ef8e-43bd-a863-de4b524e43c8 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2117.050442] env[62684]: DEBUG nova.compute.manager [req-54c3feb8-3325-4bd2-a090-35b5b4126ac8 req-f498332b-6c24-49b2-95cb-2d463c06d834 service nova] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Refreshing instance network info cache due to event network-changed-f0464f82-ef8e-43bd-a863-de4b524e43c8. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2117.050671] env[62684]: DEBUG oslo_concurrency.lockutils [req-54c3feb8-3325-4bd2-a090-35b5b4126ac8 req-f498332b-6c24-49b2-95cb-2d463c06d834 service nova] Acquiring lock "refresh_cache-e8c90faa-2c25-4308-9781-80d308b9211c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2117.050821] env[62684]: DEBUG oslo_concurrency.lockutils [req-54c3feb8-3325-4bd2-a090-35b5b4126ac8 req-f498332b-6c24-49b2-95cb-2d463c06d834 service nova] Acquired lock "refresh_cache-e8c90faa-2c25-4308-9781-80d308b9211c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2117.050990] env[62684]: DEBUG nova.network.neutron [req-54c3feb8-3325-4bd2-a090-35b5b4126ac8 req-f498332b-6c24-49b2-95cb-2d463c06d834 service nova] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Refreshing network info cache for port f0464f82-ef8e-43bd-a863-de4b524e43c8 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2117.065098] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ea8dc9-43c6-d4df-f042-06d7975fc56e, 'name': SearchDatastore_Task, 'duration_secs': 0.019266} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2117.065383] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2117.065620] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2117.065873] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2117.066047] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2117.066238] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Creating directory with path [datastore2] 
devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2117.066502] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a790b71-fe0b-492e-bb29-2354aefe1d1f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.074962] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2117.075218] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2117.075947] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93e48a81-0869-414e-a853-a86d89a63541 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.081823] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2117.081823] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52566f18-2382-e313-6449-93d6147cfd52" [ 2117.081823] env[62684]: _type = "Task" [ 2117.081823] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.088925] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52566f18-2382-e313-6449-93d6147cfd52, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.183461] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2117.183699] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2117.183890] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Deleting the datastore file [datastore2] 81b7949d-be24-46c9-8dc8-c249b65bb039 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2117.184199] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-74d6c3e1-bf98-431c-9628-4fa10aa84e57 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.191086] env[62684]: DEBUG oslo_vmware.api [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2117.191086] env[62684]: value = "task-2053311" [ 2117.191086] env[62684]: _type = "Task" [ 2117.191086] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.199504] env[62684]: DEBUG oslo_vmware.api [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053311, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.246646] env[62684]: DEBUG oslo_concurrency.lockutils [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Releasing lock "refresh_cache-3a967adf-8c46-4787-b1d1-4ed701399576" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2117.247259] env[62684]: DEBUG nova.compute.manager [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Instance network_info: |[{"id": "c12d6728-00ee-47e7-9fa8-92384e9f7a3c", "address": "fa:16:3e:50:c8:42", "network": {"id": "8eebb0b3-51e4-44c0-a4b4-b45647332a9e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-398889609-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "43304d5c52344bd9841dbc760a174b4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc12d6728-00", "ovs_interfaceid": "c12d6728-00ee-47e7-9fa8-92384e9f7a3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2117.247491] env[62684]: DEBUG oslo_concurrency.lockutils [req-fe34ce92-af45-48f4-8dd0-d4be2c63cda3 req-c3ea5233-1856-4f37-9584-bfc2d82f088c service nova] Acquired lock "refresh_cache-3a967adf-8c46-4787-b1d1-4ed701399576" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2117.247709] env[62684]: DEBUG nova.network.neutron [req-fe34ce92-af45-48f4-8dd0-d4be2c63cda3 req-c3ea5233-1856-4f37-9584-bfc2d82f088c service nova] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Refreshing network info cache for port c12d6728-00ee-47e7-9fa8-92384e9f7a3c {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2117.249087] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:c8:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c12d6728-00ee-47e7-9fa8-92384e9f7a3c', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2117.256400] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Creating folder: Project 
(43304d5c52344bd9841dbc760a174b4f). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2117.257494] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-82a02f3b-2a35-436c-a683-34458368ce7d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.268750] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Created folder: Project (43304d5c52344bd9841dbc760a174b4f) in parent group-v421118. [ 2117.268971] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Creating folder: Instances. Parent ref: group-v421348. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2117.269251] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3c0d442c-8f89-4561-9cab-1b8030438cde {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.279062] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Created folder: Instances in parent group-v421348. [ 2117.279354] env[62684]: DEBUG oslo.service.loopingcall [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2117.279568] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2117.279834] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6654c0bb-18d0-464a-8ed6-c13f015f541c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.299341] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2117.299341] env[62684]: value = "task-2053314" [ 2117.299341] env[62684]: _type = "Task" [ 2117.299341] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.306967] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053314, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.330039] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 8c046991-b294-4f33-9fce-a241984d66d7] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2117.355235] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6308299b-a785-4943-a283-a46c521cedea tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "interface-81b7949d-be24-46c9-8dc8-c249b65bb039-ff6434b2-d91e-43cc-b6f8-03cec921c38d" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 14.345s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2117.407519] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053310, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.459747] env[62684]: DEBUG oslo_vmware.api [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053307, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.482967] env[62684]: DEBUG nova.scheduler.client.report [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2117.592927] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52566f18-2382-e313-6449-93d6147cfd52, 'name': SearchDatastore_Task, 'duration_secs': 0.011632} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2117.596131] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3d2ec4a-8e38-404c-84f1-938fab35c6d9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.602418] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2117.602418] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520a9904-5339-ae48-9101-5ee57e590ae1" [ 2117.602418] env[62684]: _type = "Task" [ 2117.602418] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.611569] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520a9904-5339-ae48-9101-5ee57e590ae1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.702603] env[62684]: DEBUG oslo_vmware.api [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053311, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164881} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2117.703013] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2117.703126] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2117.703248] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2117.703428] env[62684]: INFO nova.compute.manager [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Took 1.64 seconds to destroy the instance on the hypervisor. [ 2117.703678] env[62684]: DEBUG oslo.service.loopingcall [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2117.703872] env[62684]: DEBUG nova.compute.manager [-] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2117.703966] env[62684]: DEBUG nova.network.neutron [-] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2117.775352] env[62684]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port 5f8a8ef1-7f32-48b8-9444-b5fc1ab25e68 could not be found.", "detail": ""}} {{(pid=62684) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2117.775580] env[62684]: DEBUG nova.network.neutron [-] Unable to show port 5f8a8ef1-7f32-48b8-9444-b5fc1ab25e68 as it no longer exists. {{(pid=62684) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 2117.808975] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053314, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.821916] env[62684]: DEBUG nova.network.neutron [req-54c3feb8-3325-4bd2-a090-35b5b4126ac8 req-f498332b-6c24-49b2-95cb-2d463c06d834 service nova] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Updated VIF entry in instance network info cache for port f0464f82-ef8e-43bd-a863-de4b524e43c8. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2117.822313] env[62684]: DEBUG nova.network.neutron [req-54c3feb8-3325-4bd2-a090-35b5b4126ac8 req-f498332b-6c24-49b2-95cb-2d463c06d834 service nova] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Updating instance_info_cache with network_info: [{"id": "f0464f82-ef8e-43bd-a863-de4b524e43c8", "address": "fa:16:3e:3a:6a:da", "network": {"id": "eb5671f2-a648-452e-a4c0-e81eb932b49c", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-524881696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "359bcaa2eeb64bcbb6602062777b852e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "001929c7-0dc4-4b73-a9f1-d672f8377985", "external-id": "nsx-vlan-transportzone-230", "segmentation_id": 230, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0464f82-ef", "ovs_interfaceid": "f0464f82-ef8e-43bd-a863-de4b524e43c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2117.838487] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 4e5152b0-7bac-4dc2-b6c7-6590fa2d5978] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2117.911437] env[62684]: DEBUG 
oslo_vmware.api [-] Task: {'id': task-2053310, 'name': CreateVM_Task, 'duration_secs': 0.724272} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2117.911653] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2117.912337] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2117.912572] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2117.912913] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2117.913222] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06c01f07-1b3f-4f7e-ad17-895e8ddff3e2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.917623] env[62684]: DEBUG oslo_vmware.api [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2117.917623] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]528d568a-1380-d211-94bc-978ca22645f3" [ 2117.917623] env[62684]: _type = "Task" [ 2117.917623] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.925152] env[62684]: DEBUG oslo_vmware.api [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]528d568a-1380-d211-94bc-978ca22645f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.960490] env[62684]: DEBUG oslo_vmware.api [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053307, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.519577} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2117.960805] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf/8cc68353-4678-4ee7-8c0d-3d71e6bf05bf.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2117.961043] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2117.961306] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d59b03b2-5049-467a-b881-ffe4169cda13 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.967352] env[62684]: DEBUG oslo_vmware.api [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2117.967352] env[62684]: value = "task-2053315" [ 2117.967352] env[62684]: _type = "Task" [ 2117.967352] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.973235] env[62684]: DEBUG nova.network.neutron [req-fe34ce92-af45-48f4-8dd0-d4be2c63cda3 req-c3ea5233-1856-4f37-9584-bfc2d82f088c service nova] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Updated VIF entry in instance network info cache for port c12d6728-00ee-47e7-9fa8-92384e9f7a3c. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2117.973649] env[62684]: DEBUG nova.network.neutron [req-fe34ce92-af45-48f4-8dd0-d4be2c63cda3 req-c3ea5233-1856-4f37-9584-bfc2d82f088c service nova] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Updating instance_info_cache with network_info: [{"id": "c12d6728-00ee-47e7-9fa8-92384e9f7a3c", "address": "fa:16:3e:50:c8:42", "network": {"id": "8eebb0b3-51e4-44c0-a4b4-b45647332a9e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-398889609-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "43304d5c52344bd9841dbc760a174b4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc12d6728-00", "ovs_interfaceid": "c12d6728-00ee-47e7-9fa8-92384e9f7a3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2117.978138] env[62684]: DEBUG oslo_vmware.api [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053315, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.988212] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.818s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2117.990706] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.667s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2117.992433] env[62684]: INFO nova.compute.claims [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2118.012468] env[62684]: INFO nova.scheduler.client.report [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Deleted allocations for instance 23578214-6708-43ae-88ce-56212083532a [ 2118.117541] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520a9904-5339-ae48-9101-5ee57e590ae1, 'name': SearchDatastore_Task, 'duration_secs': 0.010155} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.118472] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2118.118960] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 28886f7c-6efc-4505-84f6-682d75cea215/28886f7c-6efc-4505-84f6-682d75cea215.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2118.119299] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8422d261-4eb2-4eef-8b44-dfe16e26e588 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.129889] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2118.129889] env[62684]: value = "task-2053316" [ 2118.129889] env[62684]: _type = "Task" [ 2118.129889] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.137213] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053316, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.313754] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053314, 'name': CreateVM_Task, 'duration_secs': 0.515885} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.314360] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2118.315412] env[62684]: DEBUG oslo_concurrency.lockutils [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2118.325289] env[62684]: DEBUG oslo_concurrency.lockutils [req-54c3feb8-3325-4bd2-a090-35b5b4126ac8 req-f498332b-6c24-49b2-95cb-2d463c06d834 service nova] Releasing lock "refresh_cache-e8c90faa-2c25-4308-9781-80d308b9211c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2118.342831] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: a9dfeb4d-a92e-41cf-9d2f-43086cc9e868] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2118.415531] env[62684]: DEBUG nova.compute.manager [req-35585f44-593e-498e-972e-3764a6c21482 req-18036edd-ce6e-45ac-8c3f-3183c390fef4 service nova] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Received event network-vif-plugged-f01f26d4-f4bb-4d9a-b6dd-3451ffb353bb {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2118.416327] env[62684]: DEBUG oslo_concurrency.lockutils [req-35585f44-593e-498e-972e-3764a6c21482 req-18036edd-ce6e-45ac-8c3f-3183c390fef4 service nova] Acquiring lock "2eab4a07-9b92-436e-b4f8-fa64ae949b56-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2118.416389] env[62684]: DEBUG oslo_concurrency.lockutils [req-35585f44-593e-498e-972e-3764a6c21482 req-18036edd-ce6e-45ac-8c3f-3183c390fef4 service nova] Lock "2eab4a07-9b92-436e-b4f8-fa64ae949b56-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2118.416776] env[62684]: DEBUG oslo_concurrency.lockutils [req-35585f44-593e-498e-972e-3764a6c21482 req-18036edd-ce6e-45ac-8c3f-3183c390fef4 service nova] Lock "2eab4a07-9b92-436e-b4f8-fa64ae949b56-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2118.416983] env[62684]: DEBUG nova.compute.manager [req-35585f44-593e-498e-972e-3764a6c21482 req-18036edd-ce6e-45ac-8c3f-3183c390fef4 service nova] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] No waiting events found dispatching network-vif-plugged-f01f26d4-f4bb-4d9a-b6dd-3451ffb353bb {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2118.417250] env[62684]: WARNING nova.compute.manager [req-35585f44-593e-498e-972e-3764a6c21482 req-18036edd-ce6e-45ac-8c3f-3183c390fef4 service nova] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Received unexpected event 
network-vif-plugged-f01f26d4-f4bb-4d9a-b6dd-3451ffb353bb for instance with vm_state building and task_state spawning. [ 2118.432909] env[62684]: DEBUG oslo_vmware.api [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]528d568a-1380-d211-94bc-978ca22645f3, 'name': SearchDatastore_Task, 'duration_secs': 0.009663} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.433292] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2118.433568] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2118.433836] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2118.434022] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2118.434632] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2118.435000] env[62684]: DEBUG oslo_concurrency.lockutils [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2118.435373] env[62684]: DEBUG oslo_concurrency.lockutils [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 
2118.435638] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-68d33b02-f7c9-43ff-81ae-12003a4e3055 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.438859] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19b5c74c-e878-4759-9bfd-cf11619bffcf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.445747] env[62684]: DEBUG oslo_vmware.api [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2118.445747] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52736245-5387-50c3-9f41-b1dec1118461" [ 2118.445747] env[62684]: _type = "Task" [ 2118.445747] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.451361] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2118.451547] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2118.453968] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-148887cc-c11c-4554-a899-1f26829e4ec0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.460778] env[62684]: DEBUG oslo_vmware.api [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52736245-5387-50c3-9f41-b1dec1118461, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.465191] env[62684]: DEBUG oslo_vmware.api [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2118.465191] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e48a89-16d3-79e3-1b93-9dfe437e229a" [ 2118.465191] env[62684]: _type = "Task" [ 2118.465191] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.478460] env[62684]: DEBUG oslo_vmware.api [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e48a89-16d3-79e3-1b93-9dfe437e229a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.481667] env[62684]: DEBUG oslo_concurrency.lockutils [req-fe34ce92-af45-48f4-8dd0-d4be2c63cda3 req-c3ea5233-1856-4f37-9584-bfc2d82f088c service nova] Releasing lock "refresh_cache-3a967adf-8c46-4787-b1d1-4ed701399576" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2118.482292] env[62684]: DEBUG oslo_vmware.api [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053315, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063346} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.482985] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2118.483311] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc9215f-d521-458f-ab29-2ce633b7a423 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.510873] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf/8cc68353-4678-4ee7-8c0d-3d71e6bf05bf.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2118.511999] env[62684]: DEBUG nova.network.neutron [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Successfully updated port: f01f26d4-f4bb-4d9a-b6dd-3451ffb353bb {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2118.513466] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-530cc2e4-cf97-4bbf-834d-688db6f7f812 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.530089] env[62684]: DEBUG oslo_concurrency.lockutils [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Acquiring lock "refresh_cache-2eab4a07-9b92-436e-b4f8-fa64ae949b56" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2118.530295] env[62684]: DEBUG oslo_concurrency.lockutils [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Acquired lock "refresh_cache-2eab4a07-9b92-436e-b4f8-fa64ae949b56" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2118.530460] env[62684]: DEBUG nova.network.neutron [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 
tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2118.534462] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a782d755-109e-4683-8c6e-c51d64e12d96 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "23578214-6708-43ae-88ce-56212083532a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.016s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2118.539411] env[62684]: DEBUG oslo_vmware.api [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2118.539411] env[62684]: value = "task-2053317" [ 2118.539411] env[62684]: _type = "Task" [ 2118.539411] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.550756] env[62684]: DEBUG oslo_vmware.api [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053317, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.640032] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053316, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.447147} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.640032] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 28886f7c-6efc-4505-84f6-682d75cea215/28886f7c-6efc-4505-84f6-682d75cea215.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2118.640032] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2118.640032] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-545231c2-3ba8-4079-ad68-86194502ebbe {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.645712] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2118.645712] env[62684]: value = "task-2053318" [ 2118.645712] env[62684]: _type = "Task" [ 2118.645712] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.656018] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053318, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.846862] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: ca22ca59-1b60-46f0-ae83-03ed4002fa0d] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2118.956318] env[62684]: DEBUG oslo_vmware.api [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52736245-5387-50c3-9f41-b1dec1118461, 'name': SearchDatastore_Task, 'duration_secs': 0.061568} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.957049] env[62684]: DEBUG oslo_concurrency.lockutils [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2118.957049] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2118.957291] env[62684]: DEBUG oslo_concurrency.lockutils [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2118.975279] env[62684]: DEBUG oslo_vmware.api [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e48a89-16d3-79e3-1b93-9dfe437e229a, 'name': SearchDatastore_Task, 'duration_secs': 0.051704} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.976141] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b4ac779-d49c-44b8-bb89-2df09b27a6e4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.982686] env[62684]: DEBUG oslo_vmware.api [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2118.982686] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522bb90b-e326-7ba9-c7e6-db46406f28f6" [ 2118.982686] env[62684]: _type = "Task" [ 2118.982686] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.991727] env[62684]: DEBUG oslo_vmware.api [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522bb90b-e326-7ba9-c7e6-db46406f28f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.048729] env[62684]: DEBUG oslo_vmware.api [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053317, 'name': ReconfigVM_Task, 'duration_secs': 0.299962} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2119.051244] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Reconfigured VM instance instance-00000052 to attach disk [datastore2] 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf/8cc68353-4678-4ee7-8c0d-3d71e6bf05bf.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2119.052048] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8a391f44-c1c0-4b88-94a3-12e137d8dd9d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.059741] env[62684]: DEBUG oslo_vmware.api [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2119.059741] env[62684]: value = "task-2053319" [ 2119.059741] env[62684]: _type = "Task" [ 2119.059741] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.066224] env[62684]: DEBUG nova.network.neutron [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2119.071406] env[62684]: DEBUG oslo_vmware.api [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053319, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.156820] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053318, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059937} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2119.159500] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2119.163583] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794f45ec-0856-4eed-92a5-371991427c9b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.189389] env[62684]: DEBUG nova.network.neutron [-] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2119.197320] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 28886f7c-6efc-4505-84f6-682d75cea215/28886f7c-6efc-4505-84f6-682d75cea215.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2119.200503] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2ad98e2-0bb4-4714-b89f-b381bd9b9acf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.221153] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2119.221153] env[62684]: value = "task-2053320" [ 2119.221153] env[62684]: _type = "Task" [ 2119.221153] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.235824] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053320, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.297262] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-373d5e09-ed12-47e0-919b-0f312a1f0946 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.305512] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36cd4752-6826-41f6-b00a-e08dca821aa5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.341900] env[62684]: DEBUG nova.network.neutron [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Updating instance_info_cache with network_info: [{"id": "f01f26d4-f4bb-4d9a-b6dd-3451ffb353bb", "address": "fa:16:3e:3e:69:fc", "network": {"id": "93d9c499-7c0b-4ec0-9954-2850fef7001d", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1285503734-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70dbc7d95c2e4e23b1748be1626087c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf01f26d4-f4", "ovs_interfaceid": "f01f26d4-f4bb-4d9a-b6dd-3451ffb353bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2119.344896] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b015f85f-6749-4c5f-9241-ac31750274ad {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.355024] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: ab2c7cbe-6f46-4174-bffb-055a15f2d56b] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2119.360266] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de64491d-5a09-48b5-853d-7d07ad24c553 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.376466] env[62684]: DEBUG nova.compute.provider_tree [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2119.492387] env[62684]: DEBUG oslo_vmware.api [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522bb90b-e326-7ba9-c7e6-db46406f28f6, 'name': SearchDatastore_Task, 'duration_secs': 0.009674} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2119.492664] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2119.493105] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] e8c90faa-2c25-4308-9781-80d308b9211c/e8c90faa-2c25-4308-9781-80d308b9211c.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2119.493367] env[62684]: DEBUG oslo_concurrency.lockutils [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2119.493467] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2119.493690] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a18b8f3f-d332-443f-a24c-0db732d78469 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.495587] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b36a2dc8-9a85-44d5-ac0a-5aae7152c1cd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.502517] env[62684]: DEBUG oslo_vmware.api [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2119.502517] env[62684]: value = "task-2053321" [ 2119.502517] env[62684]: _type = "Task" [ 2119.502517] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.506367] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2119.506552] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2119.507259] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58ca88df-6b90-4243-add0-dc9e5e01276c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.513922] env[62684]: DEBUG oslo_vmware.api [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053321, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.515130] env[62684]: DEBUG oslo_vmware.api [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2119.515130] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5287db0b-effa-ba80-22ac-6c612fcbf615" [ 2119.515130] env[62684]: _type = "Task" [ 2119.515130] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.522351] env[62684]: DEBUG oslo_vmware.api [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5287db0b-effa-ba80-22ac-6c612fcbf615, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.568733] env[62684]: DEBUG oslo_vmware.api [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053319, 'name': Rename_Task, 'duration_secs': 0.299296} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2119.568986] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2119.569260] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6b9c9fb8-7426-49a4-94c3-df4d10b57c68 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.574605] env[62684]: DEBUG oslo_vmware.api [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2119.574605] env[62684]: value = "task-2053322" [ 2119.574605] env[62684]: _type = "Task" [ 2119.574605] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.581795] env[62684]: DEBUG oslo_vmware.api [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053322, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.701763] env[62684]: INFO nova.compute.manager [-] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Took 2.00 seconds to deallocate network for instance. [ 2119.733859] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053320, 'name': ReconfigVM_Task, 'duration_secs': 0.280069} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2119.734179] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 28886f7c-6efc-4505-84f6-682d75cea215/28886f7c-6efc-4505-84f6-682d75cea215.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2119.735496] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2d1cfd50-942c-4515-abd5-0f347d611e2f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.742722] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2119.742722] env[62684]: value = "task-2053323" [ 2119.742722] env[62684]: _type = "Task" [ 2119.742722] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.753495] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053323, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.851987] env[62684]: DEBUG oslo_concurrency.lockutils [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Releasing lock "refresh_cache-2eab4a07-9b92-436e-b4f8-fa64ae949b56" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2119.852358] env[62684]: DEBUG nova.compute.manager [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Instance network_info: |[{"id": "f01f26d4-f4bb-4d9a-b6dd-3451ffb353bb", "address": "fa:16:3e:3e:69:fc", "network": {"id": "93d9c499-7c0b-4ec0-9954-2850fef7001d", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1285503734-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70dbc7d95c2e4e23b1748be1626087c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf01f26d4-f4", "ovs_interfaceid": "f01f26d4-f4bb-4d9a-b6dd-3451ffb353bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2119.852817] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:69:fc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24144f5a-050a-4f1e-8d8c-774dc16dc791', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f01f26d4-f4bb-4d9a-b6dd-3451ffb353bb', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2119.860492] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Creating folder: Project (70dbc7d95c2e4e23b1748be1626087c0). Parent ref: group-v421118. 
{{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2119.860978] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 17d30180-9770-4329-a6d8-757a93514a96] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2119.863073] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "42ae6edd-e1f5-4ef8-a248-8f02e94d798e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2119.864261] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "42ae6edd-e1f5-4ef8-a248-8f02e94d798e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2119.864261] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "42ae6edd-e1f5-4ef8-a248-8f02e94d798e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2119.864261] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "42ae6edd-e1f5-4ef8-a248-8f02e94d798e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2119.864261] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "42ae6edd-e1f5-4ef8-a248-8f02e94d798e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2119.865331] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7764503e-a7d9-4f5f-b7de-950b1ddade57 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.868041] env[62684]: INFO nova.compute.manager [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Terminating instance [ 2119.870097] env[62684]: DEBUG nova.compute.manager [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2119.870309] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2119.871811] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c22ea727-d90e-45f3-aade-071a18db0c50 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.883344] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2119.885093] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f6350b02-8a5e-4ae8-a5e7-dfc9234872a2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.886841] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Created folder: Project (70dbc7d95c2e4e23b1748be1626087c0) in parent group-v421118. [ 2119.887337] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Creating folder: Instances. Parent ref: group-v421351. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2119.888392] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ded766f3-1049-48a0-850b-ca684531c2e9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.895956] env[62684]: DEBUG oslo_vmware.api [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2119.895956] env[62684]: value = "task-2053325" [ 2119.895956] env[62684]: _type = "Task" [ 2119.895956] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.901238] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Created folder: Instances in parent group-v421351. [ 2119.901568] env[62684]: DEBUG oslo.service.loopingcall [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2119.902260] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2119.902560] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6a5bce01-09af-4970-99e1-52e2a9df9e54 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.918682] env[62684]: DEBUG nova.scheduler.client.report [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 123 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2119.918957] env[62684]: DEBUG nova.compute.provider_tree [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 123 to 124 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2119.919161] env[62684]: DEBUG nova.compute.provider_tree [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2119.926519] env[62684]: DEBUG oslo_vmware.api [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053325, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.932205] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2119.932205] env[62684]: value = "task-2053327" [ 2119.932205] env[62684]: _type = "Task" [ 2119.932205] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.943431] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053327, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.013226] env[62684]: DEBUG oslo_vmware.api [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053321, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505764} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2120.013483] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] e8c90faa-2c25-4308-9781-80d308b9211c/e8c90faa-2c25-4308-9781-80d308b9211c.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2120.014161] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2120.014161] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-389f3f44-fb24-402c-9631-e97fad9f3b0f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.026792] env[62684]: DEBUG oslo_vmware.api [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5287db0b-effa-ba80-22ac-6c612fcbf615, 'name': SearchDatastore_Task, 'duration_secs': 0.008872} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2120.029315] env[62684]: DEBUG oslo_vmware.api [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2120.029315] env[62684]: value = "task-2053328" [ 2120.029315] env[62684]: _type = "Task" [ 2120.029315] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2120.029603] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a275048a-883d-4d03-9eb6-2a5cc1377612 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.041368] env[62684]: DEBUG oslo_vmware.api [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2120.041368] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52af8c30-2da4-a37b-562e-a711f9283eef" [ 2120.041368] env[62684]: _type = "Task" [ 2120.041368] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2120.045592] env[62684]: DEBUG oslo_vmware.api [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053328, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.054849] env[62684]: DEBUG oslo_vmware.api [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52af8c30-2da4-a37b-562e-a711f9283eef, 'name': SearchDatastore_Task, 'duration_secs': 0.011505} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2120.055172] env[62684]: DEBUG oslo_concurrency.lockutils [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2120.055447] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 3a967adf-8c46-4787-b1d1-4ed701399576/3a967adf-8c46-4787-b1d1-4ed701399576.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2120.055726] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-399d63fa-8613-448d-9329-3feecd1d38a1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.061975] env[62684]: DEBUG oslo_vmware.api [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2120.061975] env[62684]: value = "task-2053329" [ 2120.061975] env[62684]: _type = "Task" [ 2120.061975] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2120.069795] env[62684]: DEBUG oslo_vmware.api [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053329, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.083766] env[62684]: DEBUG oslo_vmware.api [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053322, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.208750] env[62684]: DEBUG oslo_concurrency.lockutils [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2120.252991] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053323, 'name': Rename_Task, 'duration_secs': 0.246767} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2120.253288] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2120.253589] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a3f1925f-d7d8-4991-9dce-0b2faba5440e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.260171] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2120.260171] env[62684]: value = "task-2053330" [ 2120.260171] env[62684]: _type = "Task" [ 2120.260171] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2120.268739] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053330, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.368696] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 0dbd52ac-c987-4728-974e-73e99465c5e7] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2120.412615] env[62684]: DEBUG oslo_vmware.api [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053325, 'name': PowerOffVM_Task, 'duration_secs': 0.190523} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2120.413041] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2120.413301] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2120.413672] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6da6ca96-a496-41f9-aa66-b7ddb64d058f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.428450] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.438s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2120.429195] env[62684]: DEBUG nova.compute.manager [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2120.433282] env[62684]: DEBUG oslo_concurrency.lockutils [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.225s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2120.433631] env[62684]: DEBUG nova.objects.instance [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lazy-loading 'resources' on Instance uuid 81b7949d-be24-46c9-8dc8-c249b65bb039 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2120.447389] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053327, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.450157] env[62684]: DEBUG nova.compute.manager [req-0a0af9b6-cbb5-4677-8129-833c9016db9a req-4310eef6-3207-4fad-a319-a8a0f713ddfe service nova] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Received event network-changed-f01f26d4-f4bb-4d9a-b6dd-3451ffb353bb {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2120.450366] env[62684]: DEBUG nova.compute.manager [req-0a0af9b6-cbb5-4677-8129-833c9016db9a req-4310eef6-3207-4fad-a319-a8a0f713ddfe service nova] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Refreshing instance network info cache due to event network-changed-f01f26d4-f4bb-4d9a-b6dd-3451ffb353bb. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2120.450594] env[62684]: DEBUG oslo_concurrency.lockutils [req-0a0af9b6-cbb5-4677-8129-833c9016db9a req-4310eef6-3207-4fad-a319-a8a0f713ddfe service nova] Acquiring lock "refresh_cache-2eab4a07-9b92-436e-b4f8-fa64ae949b56" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2120.450743] env[62684]: DEBUG oslo_concurrency.lockutils [req-0a0af9b6-cbb5-4677-8129-833c9016db9a req-4310eef6-3207-4fad-a319-a8a0f713ddfe service nova] Acquired lock "refresh_cache-2eab4a07-9b92-436e-b4f8-fa64ae949b56" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2120.450910] env[62684]: DEBUG nova.network.neutron [req-0a0af9b6-cbb5-4677-8129-833c9016db9a req-4310eef6-3207-4fad-a319-a8a0f713ddfe service nova] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Refreshing network info cache for port f01f26d4-f4bb-4d9a-b6dd-3451ffb353bb {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2120.542865] env[62684]: DEBUG oslo_vmware.api [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053328, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072402} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2120.543303] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2120.544649] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4c1f37-2626-4d3a-a973-2b1599420a90 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.569024] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] e8c90faa-2c25-4308-9781-80d308b9211c/e8c90faa-2c25-4308-9781-80d308b9211c.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2120.569024] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-532f4b43-9b50-454e-9703-a4c492872e3b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.596445] env[62684]: DEBUG oslo_vmware.api [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053322, 'name': PowerOnVM_Task, 'duration_secs': 0.66467} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2120.599389] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2120.599615] env[62684]: INFO nova.compute.manager [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Took 15.18 seconds to spawn the instance on the hypervisor. [ 2120.599805] env[62684]: DEBUG nova.compute.manager [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2120.600140] env[62684]: DEBUG oslo_vmware.api [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053329, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.600424] env[62684]: DEBUG oslo_vmware.api [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2120.600424] env[62684]: value = "task-2053332" [ 2120.600424] env[62684]: _type = "Task" [ 2120.600424] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2120.601115] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8c96769-b0dc-4482-8a48-1daa632b807c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.611968] env[62684]: DEBUG oslo_vmware.api [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053332, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.726168] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2120.726523] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2120.726774] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Deleting the datastore file [datastore1] 42ae6edd-e1f5-4ef8-a248-8f02e94d798e {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2120.727168] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7e3dbe5e-2dc3-4029-8a18-bc7d947dcbde {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.734602] env[62684]: DEBUG oslo_vmware.api [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2120.734602] env[62684]: value = "task-2053333" [ 2120.734602] env[62684]: _type = "Task" [ 2120.734602] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2120.742822] env[62684]: DEBUG oslo_vmware.api [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053333, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.770921] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053330, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.872428] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 06751c34-0724-44ba-a263-ad27fcf2920f] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2120.937240] env[62684]: DEBUG nova.compute.utils [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2120.942853] env[62684]: DEBUG nova.compute.manager [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2120.943642] env[62684]: DEBUG nova.network.neutron [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2120.959032] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053327, 'name': CreateVM_Task, 'duration_secs': 0.785829} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2120.959530] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2120.960194] env[62684]: DEBUG oslo_concurrency.lockutils [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2120.960602] env[62684]: DEBUG oslo_concurrency.lockutils [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2120.960719] env[62684]: DEBUG oslo_concurrency.lockutils [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2120.961139] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba8bac72-6141-41c2-af3e-c7a7abf6d23c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.965980] env[62684]: DEBUG oslo_vmware.api [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Waiting for the task: (returnval){ [ 2120.965980] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522a1f95-b1c0-0b50-390d-624172db6adf" [ 2120.965980] env[62684]: _type = "Task" [ 2120.965980] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2120.975831] env[62684]: DEBUG oslo_vmware.api [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522a1f95-b1c0-0b50-390d-624172db6adf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.033175] env[62684]: DEBUG nova.policy [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c49ec5346b34e49bb98c62ae08f8643', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b8afd480acb84db283976c13a9396c9d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2121.081487] env[62684]: DEBUG oslo_vmware.api [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053329, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.114103] env[62684]: DEBUG oslo_vmware.api [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053332, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.133550] env[62684]: INFO nova.compute.manager [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Took 27.79 seconds to build instance. [ 2121.189768] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-968ced1b-86bf-49e6-9d17-e5defcdc07ba {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.201394] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7250b7ea-c2cd-4140-b244-737a8055d47e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.236210] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-238c7776-29f8-48f3-8ce3-832348f2719a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.250952] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-727ae918-03e6-4454-867b-c3823507f238 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.254961] env[62684]: DEBUG oslo_vmware.api [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053333, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143819} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.255588] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2121.255588] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2121.255588] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2121.255784] env[62684]: INFO nova.compute.manager [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Took 1.39 seconds to destroy the instance on the hypervisor. [ 2121.256226] env[62684]: DEBUG oslo.service.loopingcall [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2121.256785] env[62684]: DEBUG nova.compute.manager [-] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2121.256889] env[62684]: DEBUG nova.network.neutron [-] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2121.268539] env[62684]: DEBUG nova.compute.provider_tree [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2121.277896] env[62684]: DEBUG oslo_vmware.api [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053330, 'name': PowerOnVM_Task, 'duration_secs': 0.826015} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.278130] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2121.278342] env[62684]: DEBUG nova.compute.manager [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2121.279075] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bee6ce13-7b1b-49e0-a3e0-3ed246bc0e7c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.370647] env[62684]: DEBUG nova.network.neutron [req-0a0af9b6-cbb5-4677-8129-833c9016db9a req-4310eef6-3207-4fad-a319-a8a0f713ddfe service nova] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Updated VIF entry in instance network info cache for port f01f26d4-f4bb-4d9a-b6dd-3451ffb353bb. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2121.371033] env[62684]: DEBUG nova.network.neutron [req-0a0af9b6-cbb5-4677-8129-833c9016db9a req-4310eef6-3207-4fad-a319-a8a0f713ddfe service nova] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Updating instance_info_cache with network_info: [{"id": "f01f26d4-f4bb-4d9a-b6dd-3451ffb353bb", "address": "fa:16:3e:3e:69:fc", "network": {"id": "93d9c499-7c0b-4ec0-9954-2850fef7001d", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1285503734-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70dbc7d95c2e4e23b1748be1626087c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf01f26d4-f4", "ovs_interfaceid": "f01f26d4-f4bb-4d9a-b6dd-3451ffb353bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2121.375570] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 5bc73032-45f9-4b5c-a4ea-e07c48e4f82b] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2121.447374] env[62684]: DEBUG nova.compute.manager [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Start building block device mappings for 
instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2121.476977] env[62684]: DEBUG oslo_vmware.api [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522a1f95-b1c0-0b50-390d-624172db6adf, 'name': SearchDatastore_Task, 'duration_secs': 0.009696} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.477891] env[62684]: DEBUG oslo_concurrency.lockutils [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2121.477891] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2121.477891] env[62684]: DEBUG oslo_concurrency.lockutils [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2121.478109] env[62684]: DEBUG oslo_concurrency.lockutils [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2121.478184] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2121.478479] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7061885b-35a1-475d-9f89-d86c7c62d39d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.488073] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2121.488321] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2121.489107] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7eb03e60-4cbf-41dd-b443-9e40f23e2552 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.495667] env[62684]: DEBUG oslo_vmware.api [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Waiting for the task: (returnval){ [ 2121.495667] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5234418d-73ff-e7a3-201a-b8e046296bee" [ 2121.495667] env[62684]: _type = "Task" [ 2121.495667] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.507720] env[62684]: DEBUG oslo_vmware.api [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5234418d-73ff-e7a3-201a-b8e046296bee, 'name': SearchDatastore_Task, 'duration_secs': 0.008158} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.507720] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03393ee7-ecef-4005-9bef-79aa71993d09 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.516950] env[62684]: DEBUG oslo_vmware.api [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Waiting for the task: (returnval){ [ 2121.516950] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52eefa34-93e1-1d83-6818-768f975fdd83" [ 2121.516950] env[62684]: _type = "Task" [ 2121.516950] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.529930] env[62684]: DEBUG oslo_vmware.api [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52eefa34-93e1-1d83-6818-768f975fdd83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.580799] env[62684]: DEBUG oslo_vmware.api [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053329, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.475025} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.581079] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 3a967adf-8c46-4787-b1d1-4ed701399576/3a967adf-8c46-4787-b1d1-4ed701399576.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2121.581303] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2121.581596] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fea33d74-b644-4ceb-9af9-e3349c3dc379 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.589360] env[62684]: DEBUG oslo_vmware.api [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2121.589360] env[62684]: value = "task-2053334" [ 2121.589360] env[62684]: _type = "Task" [ 2121.589360] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.597988] env[62684]: DEBUG oslo_vmware.api [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053334, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.600193] env[62684]: DEBUG nova.compute.manager [req-c0a58f06-fd3c-4fc2-a33e-6b4cbba2c054 req-0e500b92-25df-4651-a64e-763b0412baba service nova] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Received event network-vif-deleted-2fce07b0-060c-45c8-8466-125ebacf17b8 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2121.600507] env[62684]: INFO nova.compute.manager [req-c0a58f06-fd3c-4fc2-a33e-6b4cbba2c054 req-0e500b92-25df-4651-a64e-763b0412baba service nova] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Neutron deleted interface 2fce07b0-060c-45c8-8466-125ebacf17b8; detaching it from the instance and deleting it from the info cache [ 2121.600665] env[62684]: DEBUG nova.network.neutron [req-c0a58f06-fd3c-4fc2-a33e-6b4cbba2c054 req-0e500b92-25df-4651-a64e-763b0412baba service nova] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2121.602283] env[62684]: DEBUG nova.network.neutron [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Successfully created port: 77dcd033-4115-49dc-9ba1-8a05c4726df3 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2121.613512] env[62684]: DEBUG oslo_vmware.api [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053332, 'name': ReconfigVM_Task, 'duration_secs': 0.552094} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.613801] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Reconfigured VM instance instance-00000053 to attach disk [datastore2] e8c90faa-2c25-4308-9781-80d308b9211c/e8c90faa-2c25-4308-9781-80d308b9211c.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2121.614662] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-03531146-1be0-40cb-90b5-f84b2508c029 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.623809] env[62684]: DEBUG oslo_vmware.api [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2121.623809] env[62684]: value = "task-2053335" [ 2121.623809] env[62684]: _type = "Task" [ 2121.623809] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.633815] env[62684]: DEBUG oslo_vmware.api [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053335, 'name': Rename_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.635397] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a8d340f2-eb9f-4a81-b6d3-2fdb5e71b4e5 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "8cc68353-4678-4ee7-8c0d-3d71e6bf05bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.302s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2121.803356] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2121.803356] env[62684]: DEBUG nova.scheduler.client.report [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 124 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2121.803356] env[62684]: DEBUG nova.compute.provider_tree [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 124 to 125 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2121.803356] env[62684]: DEBUG nova.compute.provider_tree [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2121.874878] env[62684]: DEBUG oslo_concurrency.lockutils [req-0a0af9b6-cbb5-4677-8129-833c9016db9a req-4310eef6-3207-4fad-a319-a8a0f713ddfe service nova] Releasing lock "refresh_cache-2eab4a07-9b92-436e-b4f8-fa64ae949b56" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2121.875213] env[62684]: DEBUG nova.compute.manager [req-0a0af9b6-cbb5-4677-8129-833c9016db9a req-4310eef6-3207-4fad-a319-a8a0f713ddfe service nova] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Received event 
network-vif-deleted-fafc2062-9754-4ce0-8647-362b6bb8f8d7 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2121.878927] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: dcb0a5b2-379e-44ff-a9b0-be615943c94e] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2122.028507] env[62684]: DEBUG oslo_vmware.api [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52eefa34-93e1-1d83-6818-768f975fdd83, 'name': SearchDatastore_Task, 'duration_secs': 0.013} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2122.028924] env[62684]: DEBUG oslo_concurrency.lockutils [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2122.029260] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 2eab4a07-9b92-436e-b4f8-fa64ae949b56/2eab4a07-9b92-436e-b4f8-fa64ae949b56.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2122.029559] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8cfd1af6-49a0-402c-b2b4-40c5550de005 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.035994] env[62684]: DEBUG oslo_vmware.api [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Waiting for the task: (returnval){ [ 2122.035994] env[62684]: value = "task-2053336" [ 2122.035994] env[62684]: _type = "Task" [ 2122.035994] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2122.045531] env[62684]: DEBUG oslo_vmware.api [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Task: {'id': task-2053336, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.064686] env[62684]: DEBUG nova.network.neutron [-] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2122.098389] env[62684]: DEBUG oslo_vmware.api [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053334, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064579} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2122.098660] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2122.099493] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e2d127b-ee8a-42eb-9469-e3c75e796080 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.126241] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] 3a967adf-8c46-4787-b1d1-4ed701399576/3a967adf-8c46-4787-b1d1-4ed701399576.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2122.126565] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c92b9bd4-259e-4e91-9683-49f33ea595d9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.129605] env[62684]: DEBUG oslo_concurrency.lockutils [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "8cc68353-4678-4ee7-8c0d-3d71e6bf05bf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2122.129896] env[62684]: DEBUG oslo_concurrency.lockutils [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "8cc68353-4678-4ee7-8c0d-3d71e6bf05bf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2122.130139] env[62684]: DEBUG oslo_concurrency.lockutils [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "8cc68353-4678-4ee7-8c0d-3d71e6bf05bf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2122.130484] env[62684]: DEBUG oslo_concurrency.lockutils [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "8cc68353-4678-4ee7-8c0d-3d71e6bf05bf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2122.130728] env[62684]: DEBUG oslo_concurrency.lockutils [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock 
"8cc68353-4678-4ee7-8c0d-3d71e6bf05bf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2122.132846] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f4c50b61-d1a8-4b13-bb7f-747fc6dc9e54 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.147148] env[62684]: INFO nova.compute.manager [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Terminating instance [ 2122.152159] env[62684]: DEBUG nova.compute.manager [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2122.152362] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2122.153647] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-573202de-6cb5-4280-9548-0a01059b0bc7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.163517] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aaa8aa0-caff-41eb-99b6-f8030d3f7981 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.173423] env[62684]: DEBUG oslo_vmware.api [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053335, 'name': Rename_Task, 'duration_secs': 0.418077} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2122.176333] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2122.176498] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2122.176735] env[62684]: DEBUG oslo_vmware.api [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2122.176735] env[62684]: value = "task-2053337" [ 2122.176735] env[62684]: _type = "Task" [ 2122.176735] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2122.177232] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fe44a378-1853-439f-9b9a-d72ba38c2871 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.178560] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-37e45b0b-be1d-40b6-8df3-d85ba5535600 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.190108] env[62684]: DEBUG oslo_vmware.api [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053337, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.190414] env[62684]: DEBUG oslo_vmware.api [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2122.190414] env[62684]: value = "task-2053339" [ 2122.190414] env[62684]: _type = "Task" [ 2122.190414] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2122.204281] env[62684]: DEBUG nova.compute.manager [req-c0a58f06-fd3c-4fc2-a33e-6b4cbba2c054 req-0e500b92-25df-4651-a64e-763b0412baba service nova] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Detach interface failed, port_id=2fce07b0-060c-45c8-8466-125ebacf17b8, reason: Instance 42ae6edd-e1f5-4ef8-a248-8f02e94d798e could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2122.204823] env[62684]: DEBUG oslo_vmware.api [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2122.204823] env[62684]: value = "task-2053338" [ 2122.204823] env[62684]: _type = "Task" [ 2122.204823] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2122.216470] env[62684]: DEBUG oslo_vmware.api [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053339, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.219724] env[62684]: DEBUG oslo_vmware.api [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053338, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.308936] env[62684]: DEBUG oslo_concurrency.lockutils [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.876s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2122.312747] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.511s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2122.313065] env[62684]: DEBUG nova.objects.instance [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62684) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2122.337196] env[62684]: INFO nova.scheduler.client.report [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Deleted allocations for instance 81b7949d-be24-46c9-8dc8-c249b65bb039 [ 2122.382573] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 73f27fc0-ebae-41c7-b292-14396f79a5a2] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2122.457649] env[62684]: DEBUG nova.compute.manager [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2122.484007] env[62684]: DEBUG nova.virt.hardware [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2122.484357] env[62684]: DEBUG nova.virt.hardware [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2122.484593] env[62684]: DEBUG nova.virt.hardware [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2122.484830] env[62684]: DEBUG nova.virt.hardware [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2122.485029] env[62684]: DEBUG nova.virt.hardware [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2122.485234] env[62684]: DEBUG nova.virt.hardware [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2122.485501] env[62684]: DEBUG nova.virt.hardware [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2122.485702] env[62684]: DEBUG nova.virt.hardware [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2122.485940] env[62684]: DEBUG nova.virt.hardware [None 
req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2122.486144] env[62684]: DEBUG nova.virt.hardware [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2122.486330] env[62684]: DEBUG nova.virt.hardware [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2122.487204] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbede5ce-2092-4bd2-811b-22243c2dfaa1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.495115] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d42dc7e7-7d71-4a3d-843e-9d08e319a2ab {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.545157] env[62684]: DEBUG oslo_vmware.api [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Task: {'id': task-2053336, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.566763] env[62684]: INFO nova.compute.manager [-] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Took 1.31 seconds to deallocate network for instance. [ 2122.689812] env[62684]: DEBUG oslo_vmware.api [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053337, 'name': ReconfigVM_Task, 'duration_secs': 0.305251} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2122.690134] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Reconfigured VM instance instance-00000054 to attach disk [datastore2] 3a967adf-8c46-4787-b1d1-4ed701399576/3a967adf-8c46-4787-b1d1-4ed701399576.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2122.690808] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4fdd7597-fa79-4671-8b14-1113b48bca4f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.700683] env[62684]: DEBUG oslo_vmware.api [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2122.700683] env[62684]: value = "task-2053340" [ 2122.700683] env[62684]: _type = "Task" [ 2122.700683] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2122.716409] env[62684]: DEBUG oslo_vmware.api [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053340, 'name': Rename_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.721317] env[62684]: DEBUG oslo_vmware.api [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053339, 'name': PowerOnVM_Task} progress is 95%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.726420] env[62684]: DEBUG oslo_vmware.api [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053338, 'name': PowerOffVM_Task, 'duration_secs': 0.173615} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2122.726683] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2122.726925] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2122.727289] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-07fdf7cf-4f73-462f-b675-f3fc69ebc859 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.845702] env[62684]: DEBUG oslo_concurrency.lockutils [None req-31fc0b71-6d85-4d69-8e7a-40f1187f8ef0 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "81b7949d-be24-46c9-8dc8-c249b65bb039" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.783s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2122.886086] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: d532b5fa-90a3-4f25-8684-4eabaa432c86] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2123.048854] env[62684]: DEBUG oslo_vmware.api [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Task: {'id': task-2053336, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2123.073597] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2123.095393] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "28886f7c-6efc-4505-84f6-682d75cea215" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2123.095775] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "28886f7c-6efc-4505-84f6-682d75cea215" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2123.096062] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "28886f7c-6efc-4505-84f6-682d75cea215-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2123.096337] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "28886f7c-6efc-4505-84f6-682d75cea215-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2123.096539] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "28886f7c-6efc-4505-84f6-682d75cea215-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2123.098789] env[62684]: INFO nova.compute.manager [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Terminating instance [ 2123.100524] env[62684]: DEBUG nova.compute.manager [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2123.100724] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2123.101582] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c8078aa-1b9c-4073-8472-8c6834adf920 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.108543] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2123.108814] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9d8985e2-1ee1-4232-bb1c-0b3fcb96f301 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.114794] env[62684]: DEBUG oslo_vmware.api [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2123.114794] env[62684]: value = "task-2053342" [ 2123.114794] env[62684]: _type = "Task" [ 2123.114794] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2123.125152] env[62684]: DEBUG oslo_vmware.api [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053342, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2123.212818] env[62684]: DEBUG oslo_vmware.api [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053340, 'name': Rename_Task, 'duration_secs': 0.222997} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2123.213528] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2123.214376] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9ba7aa1a-d70e-4e3d-adf8-2983bb34cb46 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.219461] env[62684]: DEBUG oslo_vmware.api [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053339, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2123.224608] env[62684]: DEBUG oslo_vmware.api [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2123.224608] env[62684]: value = "task-2053343" [ 2123.224608] env[62684]: _type = "Task" [ 2123.224608] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2123.231733] env[62684]: DEBUG oslo_vmware.api [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053343, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2123.320859] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2123.321137] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2123.321333] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Deleting the datastore file [datastore2] 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2123.322065] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-78176d1a-24fd-480a-9e34-4dd657a0c043 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.325586] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c152f356-a48a-4d1e-b0e0-04e27e1ca1da tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2123.327152] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.254s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2123.327849] env[62684]: DEBUG nova.objects.instance [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lazy-loading 'resources' on Instance uuid 42ae6edd-e1f5-4ef8-a248-8f02e94d798e {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2123.335524] env[62684]: DEBUG oslo_vmware.api [None 
req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2123.335524] env[62684]: value = "task-2053344" [ 2123.335524] env[62684]: _type = "Task" [ 2123.335524] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2123.347147] env[62684]: DEBUG oslo_vmware.api [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053344, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2123.390524] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 6b1f0e69-3915-40dc-b4ec-93ab174f12b6] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2123.546906] env[62684]: DEBUG oslo_vmware.api [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Task: {'id': task-2053336, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.056413} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2123.547188] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 2eab4a07-9b92-436e-b4f8-fa64ae949b56/2eab4a07-9b92-436e-b4f8-fa64ae949b56.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2123.547515] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2123.547659] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-73c70090-2e72-456c-879d-46655edf1e08 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.553482] env[62684]: DEBUG oslo_vmware.api [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Waiting for the task: (returnval){ [ 2123.553482] env[62684]: value = "task-2053345" [ 2123.553482] env[62684]: _type = "Task" [ 2123.553482] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2123.561103] env[62684]: DEBUG oslo_vmware.api [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Task: {'id': task-2053345, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2123.626259] env[62684]: DEBUG oslo_vmware.api [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053342, 'name': PowerOffVM_Task, 'duration_secs': 0.229107} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2123.626559] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2123.626741] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2123.627417] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4a45741f-65c7-40a1-ba56-c35517ae07cf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.716244] env[62684]: DEBUG oslo_vmware.api [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053339, 'name': PowerOnVM_Task, 'duration_secs': 1.052007} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2123.716522] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2123.716740] env[62684]: INFO nova.compute.manager [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Took 15.94 seconds to spawn the instance on the hypervisor. [ 2123.716966] env[62684]: DEBUG nova.compute.manager [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2123.718061] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eadb47cc-1865-4af6-84e7-3de0bdee7f70 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.741077] env[62684]: DEBUG oslo_vmware.api [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053343, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2123.822024] env[62684]: DEBUG nova.compute.manager [req-64f4bc49-6ccc-4044-8df7-518b56551512 req-b50bd014-db8e-49da-895d-50abcc212481 service nova] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Received event network-vif-plugged-77dcd033-4115-49dc-9ba1-8a05c4726df3 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2123.822024] env[62684]: DEBUG oslo_concurrency.lockutils [req-64f4bc49-6ccc-4044-8df7-518b56551512 req-b50bd014-db8e-49da-895d-50abcc212481 service nova] Acquiring lock "6b461482-0606-4af3-98a2-88c0318d1a69-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2123.822024] env[62684]: DEBUG oslo_concurrency.lockutils [req-64f4bc49-6ccc-4044-8df7-518b56551512 req-b50bd014-db8e-49da-895d-50abcc212481 service nova] Lock "6b461482-0606-4af3-98a2-88c0318d1a69-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2123.822024] env[62684]: DEBUG oslo_concurrency.lockutils [req-64f4bc49-6ccc-4044-8df7-518b56551512 req-b50bd014-db8e-49da-895d-50abcc212481 service nova] Lock "6b461482-0606-4af3-98a2-88c0318d1a69-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2123.822024] env[62684]: DEBUG nova.compute.manager [req-64f4bc49-6ccc-4044-8df7-518b56551512 req-b50bd014-db8e-49da-895d-50abcc212481 service nova] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] No waiting events found dispatching network-vif-plugged-77dcd033-4115-49dc-9ba1-8a05c4726df3 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2123.822024] env[62684]: WARNING nova.compute.manager [req-64f4bc49-6ccc-4044-8df7-518b56551512 req-b50bd014-db8e-49da-895d-50abcc212481 service nova] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Received unexpected event network-vif-plugged-77dcd033-4115-49dc-9ba1-8a05c4726df3 for instance with vm_state building and task_state spawning. 
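The PowerOffVM_Task / "Waiting for the task" / "progress is N%" exchanges recorded above follow oslo.vmware's generic session and task-polling pattern; the snippet below is only a minimal sketch of that pattern, with a placeholder vCenter host, credentials and managed-object id rather than anything taken from this log.

from oslo_vmware import api, vim_util

# Establish a vCenter session (this is what emits the SessionManager.Login
# and "Successfully established new session" records).
session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

# Build a managed-object reference for a VM (placeholder id) and invoke a
# task-returning SOAP method on it, as vm_util.power_off_instance does.
vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

# wait_for_task polls the task on the server and logs the
# "Task: {...} progress is N%" lines until it completes or raises.
session.wait_for_task(task)
session.logout()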
[ 2123.844605] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "0a8d7c48-cf90-4baf-a900-38fbd62869a6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2123.844841] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "0a8d7c48-cf90-4baf-a900-38fbd62869a6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2123.849431] env[62684]: DEBUG oslo_vmware.api [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053344, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.123856} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2123.849853] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2123.850072] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2123.850391] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2123.850391] env[62684]: INFO nova.compute.manager [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Took 1.70 seconds to destroy the instance on the hypervisor. [ 2123.850637] env[62684]: DEBUG oslo.service.loopingcall [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2123.850861] env[62684]: DEBUG nova.compute.manager [-] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2123.850954] env[62684]: DEBUG nova.network.neutron [-] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2123.898272] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: fb7f38a0-bcfa-4d96-bde3-20d6f1d70112] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2123.995949] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2123.995949] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2123.995949] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Deleting the datastore file [datastore2] 28886f7c-6efc-4505-84f6-682d75cea215 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2123.995949] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-78852853-3333-4ddc-b6b1-ff54c3ff05fc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.001748] env[62684]: DEBUG oslo_vmware.api [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2124.001748] env[62684]: value = "task-2053347" [ 2124.001748] env[62684]: _type = "Task" [ 2124.001748] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2124.009125] env[62684]: DEBUG nova.network.neutron [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Successfully updated port: 77dcd033-4115-49dc-9ba1-8a05c4726df3 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2124.014732] env[62684]: DEBUG oslo_vmware.api [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053347, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2124.065527] env[62684]: DEBUG oslo_vmware.api [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Task: {'id': task-2053345, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076359} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2124.065803] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2124.066598] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6e2134f-16ab-4801-8e85-5d95e89786b0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.089381] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] 2eab4a07-9b92-436e-b4f8-fa64ae949b56/2eab4a07-9b92-436e-b4f8-fa64ae949b56.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2124.092270] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b55832c2-2fbb-4b40-a5d1-eeef165366be {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.112544] env[62684]: DEBUG oslo_vmware.api [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Waiting for the task: (returnval){ [ 2124.112544] env[62684]: value = "task-2053348" [ 2124.112544] env[62684]: _type = "Task" [ 2124.112544] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2124.121270] env[62684]: DEBUG oslo_vmware.api [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Task: {'id': task-2053348, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2124.126659] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96084f41-94b2-4c0b-bfcb-ec2ec0659744 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.135144] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-736494b5-aec4-46b5-b8f0-781a2512bfca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.174832] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e4ffb7c-3196-478a-8465-f31cdd7fc5b5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.183226] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f2ea1e7-218d-400f-938f-a77c76e839c9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.197755] env[62684]: DEBUG nova.compute.provider_tree [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2124.243367] env[62684]: DEBUG oslo_vmware.api [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053343, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2124.246975] env[62684]: INFO nova.compute.manager [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Took 30.26 seconds to build instance. [ 2124.350780] env[62684]: DEBUG nova.compute.manager [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2124.401390] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: effc673a-103f-413b-88ac-6907ad1ee852] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2124.423063] env[62684]: DEBUG nova.compute.manager [req-4b305937-f11f-4aeb-822b-a569f89dad25 req-f17fb27c-8f3d-4ec5-8bc5-d295368de91c service nova] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Received event network-vif-deleted-0d755b84-8780-4295-a8a6-032192e3c6f7 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2124.423305] env[62684]: INFO nova.compute.manager [req-4b305937-f11f-4aeb-822b-a569f89dad25 req-f17fb27c-8f3d-4ec5-8bc5-d295368de91c service nova] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Neutron deleted interface 0d755b84-8780-4295-a8a6-032192e3c6f7; detaching it from the instance and deleting it from the info cache [ 2124.423487] env[62684]: DEBUG nova.network.neutron [req-4b305937-f11f-4aeb-822b-a569f89dad25 req-f17fb27c-8f3d-4ec5-8bc5-d295368de91c service nova] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2124.511533] env[62684]: DEBUG oslo_vmware.api [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053347, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153148} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2124.511853] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2124.512080] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2124.512303] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2124.512489] env[62684]: INFO nova.compute.manager [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Took 1.41 seconds to destroy the instance on the hypervisor. [ 2124.512730] env[62684]: DEBUG oslo.service.loopingcall [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2124.512930] env[62684]: DEBUG nova.compute.manager [-] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2124.513865] env[62684]: DEBUG nova.network.neutron [-] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2124.515818] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Acquiring lock "refresh_cache-6b461482-0606-4af3-98a2-88c0318d1a69" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2124.515955] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Acquired lock "refresh_cache-6b461482-0606-4af3-98a2-88c0318d1a69" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2124.516129] env[62684]: DEBUG nova.network.neutron [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2124.624601] env[62684]: DEBUG oslo_vmware.api [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Task: {'id': task-2053348, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2124.702485] env[62684]: DEBUG nova.scheduler.client.report [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2124.735609] env[62684]: DEBUG oslo_vmware.api [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053343, 'name': PowerOnVM_Task, 'duration_secs': 1.104562} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2124.735948] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2124.736225] env[62684]: INFO nova.compute.manager [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Took 13.69 seconds to spawn the instance on the hypervisor. [ 2124.736414] env[62684]: DEBUG nova.compute.manager [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2124.737532] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d84f781-28df-4c12-95b1-9d644d5215f4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.749875] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4ccd44c7-a338-4036-b597-28e98ab52355 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "e8c90faa-2c25-4308-9781-80d308b9211c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.775s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2124.763334] env[62684]: DEBUG nova.network.neutron [-] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2124.868587] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2124.905282] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 0676806b-c1f0-4c1a-a12d-add2edf1588f] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2124.927130] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d45bc470-67eb-42a0-bb69-278777c2acca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.936564] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b85f115-ece8-4e0b-8eab-98c97f802fdf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.967063] env[62684]: DEBUG nova.compute.manager [req-4b305937-f11f-4aeb-822b-a569f89dad25 req-f17fb27c-8f3d-4ec5-8bc5-d295368de91c service nova] [instance: 
8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Detach interface failed, port_id=0d755b84-8780-4295-a8a6-032192e3c6f7, reason: Instance 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2125.049418] env[62684]: DEBUG nova.network.neutron [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2125.124959] env[62684]: DEBUG oslo_vmware.api [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Task: {'id': task-2053348, 'name': ReconfigVM_Task, 'duration_secs': 0.534315} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2125.125354] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Reconfigured VM instance instance-00000055 to attach disk [datastore2] 2eab4a07-9b92-436e-b4f8-fa64ae949b56/2eab4a07-9b92-436e-b4f8-fa64ae949b56.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2125.126019] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-35e6d6b4-2c38-4abe-9493-f73d73cb20d3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.133631] env[62684]: DEBUG oslo_vmware.api [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Waiting for the task: (returnval){ [ 2125.133631] env[62684]: value = "task-2053349" [ 2125.133631] env[62684]: _type = "Task" [ 2125.133631] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2125.143233] env[62684]: DEBUG oslo_vmware.api [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Task: {'id': task-2053349, 'name': Rename_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2125.197669] env[62684]: DEBUG nova.network.neutron [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Updating instance_info_cache with network_info: [{"id": "77dcd033-4115-49dc-9ba1-8a05c4726df3", "address": "fa:16:3e:86:e6:b6", "network": {"id": "fa25d889-412a-475c-9ba5-a52e6e6e1a5f", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-19235775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b8afd480acb84db283976c13a9396c9d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77dcd033-41", "ovs_interfaceid": "77dcd033-4115-49dc-9ba1-8a05c4726df3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2125.207364] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.880s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2125.209902] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.341s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2125.211665] env[62684]: INFO nova.compute.claims [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2125.233333] env[62684]: INFO nova.scheduler.client.report [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Deleted allocations for instance 42ae6edd-e1f5-4ef8-a248-8f02e94d798e [ 2125.257353] env[62684]: INFO nova.compute.manager [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Took 30.29 seconds to build instance. [ 2125.266042] env[62684]: INFO nova.compute.manager [-] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Took 1.41 seconds to deallocate network for instance. 
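The recurring 'Acquiring lock ... by ...', 'Lock ... acquired by ... :: waited N s' and 'Lock ... "released" by ... :: held N s' records come from oslo.concurrency's lock helpers, which time how long a caller waited for and then held a named semaphore. A minimal, generic sketch of that usage follows; the decorated function is illustrative and not Nova's actual code, only the lock name is taken from the log.

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage(instance):
    # Runs with the named lock held; the decorator's wrapper logs the
    # "acquired by ... waited" and '"released" by ... held' DEBUG lines.
    pass

# Equivalent inline form using the context manager:
with lockutils.lock('compute_resources'):
    pass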
[ 2125.273417] env[62684]: DEBUG nova.network.neutron [-] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2125.280660] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "f037d6b2-2082-4611-985e-b9a077eb8250" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2125.281624] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "f037d6b2-2082-4611-985e-b9a077eb8250" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2125.409257] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2125.409570] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Cleaning up deleted instances with incomplete migration {{(pid=62684) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 2125.643600] env[62684]: DEBUG oslo_vmware.api [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Task: {'id': task-2053349, 'name': Rename_Task, 'duration_secs': 0.138623} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2125.643980] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2125.644394] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eaae3553-212b-4229-8394-32a6351e81f2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.651150] env[62684]: DEBUG oslo_vmware.api [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Waiting for the task: (returnval){ [ 2125.651150] env[62684]: value = "task-2053350" [ 2125.651150] env[62684]: _type = "Task" [ 2125.651150] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2125.659279] env[62684]: DEBUG oslo_vmware.api [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Task: {'id': task-2053350, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2125.700602] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Releasing lock "refresh_cache-6b461482-0606-4af3-98a2-88c0318d1a69" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2125.700937] env[62684]: DEBUG nova.compute.manager [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Instance network_info: |[{"id": "77dcd033-4115-49dc-9ba1-8a05c4726df3", "address": "fa:16:3e:86:e6:b6", "network": {"id": "fa25d889-412a-475c-9ba5-a52e6e6e1a5f", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-19235775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b8afd480acb84db283976c13a9396c9d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77dcd033-41", "ovs_interfaceid": "77dcd033-4115-49dc-9ba1-8a05c4726df3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2125.701426] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:e6:b6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f78b07ea-f425-4622-84f4-706a5d8820a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '77dcd033-4115-49dc-9ba1-8a05c4726df3', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2125.709516] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Creating folder: Project (b8afd480acb84db283976c13a9396c9d). Parent ref: group-v421118. 
{{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2125.709977] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-65629aa8-4ec3-4a7c-b30d-3ad24a5fc44e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.721288] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Created folder: Project (b8afd480acb84db283976c13a9396c9d) in parent group-v421118. [ 2125.721522] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Creating folder: Instances. Parent ref: group-v421354. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2125.721792] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dec57ff1-05a4-40fe-a879-962190cf0c39 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.730898] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Created folder: Instances in parent group-v421354. [ 2125.731213] env[62684]: DEBUG oslo.service.loopingcall [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2125.731457] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2125.735020] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bfea7c56-9336-4179-ab33-cc45e470b26f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.750913] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4eb7c243-e814-40bc-807e-f4169255e95f tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "42ae6edd-e1f5-4ef8-a248-8f02e94d798e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.888s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2125.757396] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2125.757396] env[62684]: value = "task-2053353" [ 2125.757396] env[62684]: _type = "Task" [ 2125.757396] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2125.761679] env[62684]: DEBUG oslo_concurrency.lockutils [None req-edaf4197-8a87-4e75-9f34-244d345be018 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Lock "3a967adf-8c46-4787-b1d1-4ed701399576" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.804s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2125.767792] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053353, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2125.772722] env[62684]: DEBUG oslo_concurrency.lockutils [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2125.775381] env[62684]: INFO nova.compute.manager [-] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Took 1.26 seconds to deallocate network for instance. [ 2125.783647] env[62684]: DEBUG nova.compute.manager [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2125.912178] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2126.105777] env[62684]: DEBUG nova.compute.manager [req-456901b2-6016-4064-85c6-a13c5e813bf8 req-2193749b-cf6e-4760-9dfb-3fdf6bfd5adb service nova] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Received event network-changed-77dcd033-4115-49dc-9ba1-8a05c4726df3 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2126.106039] env[62684]: DEBUG nova.compute.manager [req-456901b2-6016-4064-85c6-a13c5e813bf8 req-2193749b-cf6e-4760-9dfb-3fdf6bfd5adb service nova] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Refreshing instance network info cache due to event network-changed-77dcd033-4115-49dc-9ba1-8a05c4726df3. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2126.106286] env[62684]: DEBUG oslo_concurrency.lockutils [req-456901b2-6016-4064-85c6-a13c5e813bf8 req-2193749b-cf6e-4760-9dfb-3fdf6bfd5adb service nova] Acquiring lock "refresh_cache-6b461482-0606-4af3-98a2-88c0318d1a69" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2126.106511] env[62684]: DEBUG oslo_concurrency.lockutils [req-456901b2-6016-4064-85c6-a13c5e813bf8 req-2193749b-cf6e-4760-9dfb-3fdf6bfd5adb service nova] Acquired lock "refresh_cache-6b461482-0606-4af3-98a2-88c0318d1a69" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2126.106694] env[62684]: DEBUG nova.network.neutron [req-456901b2-6016-4064-85c6-a13c5e813bf8 req-2193749b-cf6e-4760-9dfb-3fdf6bfd5adb service nova] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Refreshing network info cache for port 77dcd033-4115-49dc-9ba1-8a05c4726df3 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2126.163160] env[62684]: DEBUG oslo_vmware.api [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Task: {'id': task-2053350, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2126.269330] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053353, 'name': CreateVM_Task, 'duration_secs': 0.457776} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2126.269552] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2126.270191] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2126.270403] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2126.270729] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2126.270992] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2018d918-424d-49e7-bace-b6ad5768584a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.278103] env[62684]: DEBUG oslo_vmware.api [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 
tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Waiting for the task: (returnval){ [ 2126.278103] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5284bab4-cca4-d1e2-255b-988fc4ca295c" [ 2126.278103] env[62684]: _type = "Task" [ 2126.278103] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2126.283704] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2126.293073] env[62684]: DEBUG oslo_vmware.api [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5284bab4-cca4-d1e2-255b-988fc4ca295c, 'name': SearchDatastore_Task, 'duration_secs': 0.008559} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2126.295208] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2126.295513] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2126.295805] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2126.295970] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2126.296174] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2126.296595] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-133fee8f-78ab-459c-b880-4c003305dbd5 {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.304276] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2126.304276] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2126.305457] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a26ac0fd-a537-4c58-bc19-9df602395691 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.308168] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2126.313604] env[62684]: DEBUG oslo_vmware.api [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Waiting for the task: (returnval){ [ 2126.313604] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522c42bb-282f-51ce-23d6-db71b1dc78ef" [ 2126.313604] env[62684]: _type = "Task" [ 2126.313604] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2126.321345] env[62684]: DEBUG oslo_vmware.api [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522c42bb-282f-51ce-23d6-db71b1dc78ef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2126.425674] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c59a0b56-1056-484c-a570-09db921642fd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.433731] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea97b881-5b10-4938-a721-562f42320487 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.466248] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf92d916-85bc-45ce-bdaa-3879a47d57a8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.469841] env[62684]: DEBUG nova.compute.manager [req-1ef82f99-a3ce-41a8-a96e-35c916fa1d80 req-342c677d-f626-4dd1-8b85-18f6c7a4e8f2 service nova] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Received event network-vif-deleted-8d2957b9-747d-4fea-8047-0980e3548bb2 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2126.475630] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c912bcbf-c863-4db3-b249-6ae9960933e3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.490011] env[62684]: DEBUG nova.compute.provider_tree [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2126.560190] env[62684]: DEBUG nova.compute.manager [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2126.561215] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9190e6f7-7df1-4313-9f8c-44fb85a23d9e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.663659] env[62684]: DEBUG oslo_vmware.api [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Task: {'id': task-2053350, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2126.676986] env[62684]: INFO nova.compute.manager [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Rescuing [ 2126.677472] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquiring lock "refresh_cache-3a967adf-8c46-4787-b1d1-4ed701399576" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2126.678176] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquired lock "refresh_cache-3a967adf-8c46-4787-b1d1-4ed701399576" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2126.678176] env[62684]: DEBUG nova.network.neutron [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2126.701292] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "9418b42d-9fff-41fd-92d1-a832017fc9c3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2126.701525] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "9418b42d-9fff-41fd-92d1-a832017fc9c3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2126.823172] env[62684]: DEBUG oslo_vmware.api [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522c42bb-282f-51ce-23d6-db71b1dc78ef, 'name': SearchDatastore_Task, 'duration_secs': 0.008165} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2126.823958] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-679f4d00-34dc-4290-acb7-e3a9a22a4025 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.830349] env[62684]: DEBUG oslo_vmware.api [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Waiting for the task: (returnval){ [ 2126.830349] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524f5f90-0b15-b300-b768-246bc4db5542" [ 2126.830349] env[62684]: _type = "Task" [ 2126.830349] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2126.834661] env[62684]: DEBUG nova.network.neutron [req-456901b2-6016-4064-85c6-a13c5e813bf8 req-2193749b-cf6e-4760-9dfb-3fdf6bfd5adb service nova] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Updated VIF entry in instance network info cache for port 77dcd033-4115-49dc-9ba1-8a05c4726df3. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2126.835018] env[62684]: DEBUG nova.network.neutron [req-456901b2-6016-4064-85c6-a13c5e813bf8 req-2193749b-cf6e-4760-9dfb-3fdf6bfd5adb service nova] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Updating instance_info_cache with network_info: [{"id": "77dcd033-4115-49dc-9ba1-8a05c4726df3", "address": "fa:16:3e:86:e6:b6", "network": {"id": "fa25d889-412a-475c-9ba5-a52e6e6e1a5f", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-19235775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b8afd480acb84db283976c13a9396c9d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77dcd033-41", "ovs_interfaceid": "77dcd033-4115-49dc-9ba1-8a05c4726df3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2126.844815] env[62684]: DEBUG oslo_vmware.api [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524f5f90-0b15-b300-b768-246bc4db5542, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2126.992994] env[62684]: DEBUG nova.scheduler.client.report [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2127.074280] env[62684]: INFO nova.compute.manager [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] instance snapshotting [ 2127.076672] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb794b9-8f7a-4dcd-989d-a84fca2bd916 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.097843] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c577541d-75f2-4339-8ee2-438e3ce5e4b5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.161589] env[62684]: DEBUG oslo_vmware.api [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Task: {'id': task-2053350, 'name': PowerOnVM_Task, 'duration_secs': 1.013441} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2127.161912] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2127.162189] env[62684]: INFO nova.compute.manager [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Took 10.95 seconds to spawn the instance on the hypervisor. 
[ 2127.162422] env[62684]: DEBUG nova.compute.manager [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2127.163263] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-609ea6b3-dc3c-4034-9e68-fd9af6f7b89d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.204129] env[62684]: DEBUG nova.compute.manager [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2127.338772] env[62684]: DEBUG oslo_concurrency.lockutils [req-456901b2-6016-4064-85c6-a13c5e813bf8 req-2193749b-cf6e-4760-9dfb-3fdf6bfd5adb service nova] Releasing lock "refresh_cache-6b461482-0606-4af3-98a2-88c0318d1a69" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2127.346951] env[62684]: DEBUG oslo_vmware.api [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524f5f90-0b15-b300-b768-246bc4db5542, 'name': SearchDatastore_Task, 'duration_secs': 0.012078} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2127.346951] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2127.347278] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 6b461482-0606-4af3-98a2-88c0318d1a69/6b461482-0606-4af3-98a2-88c0318d1a69.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2127.348445] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a04717a0-eb0f-4af0-986b-7ce8883a26b3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.357576] env[62684]: DEBUG oslo_vmware.api [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Waiting for the task: (returnval){ [ 2127.357576] env[62684]: value = "task-2053354" [ 2127.357576] env[62684]: _type = "Task" [ 2127.357576] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2127.369028] env[62684]: DEBUG oslo_vmware.api [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Task: {'id': task-2053354, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2127.392831] env[62684]: DEBUG nova.network.neutron [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Updating instance_info_cache with network_info: [{"id": "c12d6728-00ee-47e7-9fa8-92384e9f7a3c", "address": "fa:16:3e:50:c8:42", "network": {"id": "8eebb0b3-51e4-44c0-a4b4-b45647332a9e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-398889609-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "43304d5c52344bd9841dbc760a174b4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc12d6728-00", "ovs_interfaceid": "c12d6728-00ee-47e7-9fa8-92384e9f7a3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2127.498505] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.288s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2127.499052] env[62684]: DEBUG nova.compute.manager [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2127.501682] env[62684]: DEBUG oslo_concurrency.lockutils [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.729s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2127.501912] env[62684]: DEBUG nova.objects.instance [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lazy-loading 'resources' on Instance uuid 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2127.609156] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Creating Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2127.609534] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-14993b11-ed74-42e1-b037-ce0a02bebc56 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.618086] env[62684]: DEBUG oslo_vmware.api [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2127.618086] env[62684]: value = "task-2053355" [ 2127.618086] env[62684]: _type = "Task" [ 2127.618086] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2127.627461] env[62684]: DEBUG oslo_vmware.api [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053355, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2127.681261] env[62684]: INFO nova.compute.manager [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Took 31.43 seconds to build instance. [ 2127.728418] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2127.869261] env[62684]: DEBUG oslo_vmware.api [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Task: {'id': task-2053354, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2127.896337] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Releasing lock "refresh_cache-3a967adf-8c46-4787-b1d1-4ed701399576" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2128.004824] env[62684]: DEBUG nova.compute.utils [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2128.009981] env[62684]: DEBUG nova.compute.manager [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2128.010306] env[62684]: DEBUG nova.network.neutron [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2128.090308] env[62684]: DEBUG nova.policy [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a3800d71923848db8635de9a8a2ff9f6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '76d88ac878d44480b3b54b24ab87efa9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2128.138262] env[62684]: DEBUG oslo_vmware.api [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053355, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.183544] env[62684]: DEBUG oslo_concurrency.lockutils [None req-28d50267-bcea-40ac-9b79-1108449d55c5 tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Lock "2eab4a07-9b92-436e-b4f8-fa64ae949b56" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.937s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2128.311314] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-016c70d5-bcc0-4633-9cc6-bf0af6245ce1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.320571] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e631036c-80b1-4813-b598-b636bb24baa9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.355810] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d9fed0f-623b-4eb6-a8b6-6b94b0a8d3a2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.367358] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-096e66d6-c192-4fb5-bbbd-c8a53b3722af {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.374378] env[62684]: DEBUG oslo_vmware.api [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Task: {'id': task-2053354, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.801598} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2128.375094] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 6b461482-0606-4af3-98a2-88c0318d1a69/6b461482-0606-4af3-98a2-88c0318d1a69.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2128.375304] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2128.375572] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7cce17e3-c9c7-45a5-b240-325d5415429d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.385330] env[62684]: DEBUG nova.compute.provider_tree [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2128.391171] env[62684]: DEBUG oslo_vmware.api [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Waiting for the task: (returnval){ [ 2128.391171] env[62684]: value = "task-2053356" [ 2128.391171] env[62684]: _type = "Task" [ 2128.391171] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2128.399278] env[62684]: DEBUG oslo_vmware.api [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Task: {'id': task-2053356, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.429940] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2128.432070] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9dc2f043-df5c-4b6b-9a99-bff077bd86c0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.437934] env[62684]: DEBUG oslo_vmware.api [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2128.437934] env[62684]: value = "task-2053357" [ 2128.437934] env[62684]: _type = "Task" [ 2128.437934] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2128.447495] env[62684]: DEBUG oslo_vmware.api [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053357, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.514314] env[62684]: DEBUG nova.compute.manager [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2128.593753] env[62684]: DEBUG nova.network.neutron [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Successfully created port: f121aad3-8e11-4583-8919-c502deebb5e2 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2128.634385] env[62684]: DEBUG oslo_vmware.api [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053355, 'name': CreateSnapshot_Task, 'duration_secs': 0.901861} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2128.634763] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Created Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2128.638752] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0825c521-abaf-4514-a673-a7a579e4780c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.890125] env[62684]: DEBUG nova.scheduler.client.report [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2128.902016] env[62684]: DEBUG oslo_vmware.api [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Task: {'id': task-2053356, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.290782} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2128.902522] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2128.903386] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b0ff77-932b-4b2c-8eb4-416bdc23f43d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.926910] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] 6b461482-0606-4af3-98a2-88c0318d1a69/6b461482-0606-4af3-98a2-88c0318d1a69.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2128.927584] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-826a2af1-7648-41fe-82c4-3e9f05f138e7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.953996] env[62684]: DEBUG oslo_vmware.api [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053357, 'name': PowerOffVM_Task, 'duration_secs': 0.347394} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2128.955239] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2128.955598] env[62684]: DEBUG oslo_vmware.api [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Waiting for the task: (returnval){ [ 2128.955598] env[62684]: value = "task-2053358" [ 2128.955598] env[62684]: _type = "Task" [ 2128.955598] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2128.956307] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674b019a-9a8d-47b0-bb25-40f00af6cbee {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.966457] env[62684]: DEBUG oslo_vmware.api [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Task: {'id': task-2053358, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.981311] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d753ac-ea0c-40c5-8bce-cb33b2742a8c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.014390] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2129.015087] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eae8804e-ad54-410a-bcc7-f3111e2a6835 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.023846] env[62684]: DEBUG oslo_vmware.api [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2129.023846] env[62684]: value = "task-2053359" [ 2129.023846] env[62684]: _type = "Task" [ 2129.023846] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2129.032895] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] VM already powered off {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2129.033127] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2129.033383] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2129.033537] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2129.033718] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2129.033967] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-6d085844-0e6a-44a2-8724-ec22b23fc11b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.042359] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2129.042548] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2129.043410] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ee59005-31ef-43ce-8646-9e0c8f4c9cb7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.048342] env[62684]: DEBUG oslo_vmware.api [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2129.048342] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5274405b-cfd5-8be2-2760-1a647b4e0995" [ 2129.048342] env[62684]: _type = "Task" [ 2129.048342] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2129.055999] env[62684]: DEBUG oslo_vmware.api [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5274405b-cfd5-8be2-2760-1a647b4e0995, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.156363] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Creating linked-clone VM from snapshot {{(pid=62684) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2129.156751] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9943dbd8-9883-4fb9-a24a-eaf92e8a70d4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.167023] env[62684]: DEBUG oslo_vmware.api [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2129.167023] env[62684]: value = "task-2053360" [ 2129.167023] env[62684]: _type = "Task" [ 2129.167023] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2129.175933] env[62684]: DEBUG oslo_vmware.api [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053360, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.398269] env[62684]: DEBUG oslo_concurrency.lockutils [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.896s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2129.402324] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.118s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2129.402633] env[62684]: DEBUG nova.objects.instance [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lazy-loading 'resources' on Instance uuid 28886f7c-6efc-4505-84f6-682d75cea215 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2129.429677] env[62684]: INFO nova.scheduler.client.report [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Deleted allocations for instance 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf [ 2129.469802] env[62684]: DEBUG oslo_vmware.api [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Task: {'id': task-2053358, 'name': ReconfigVM_Task, 'duration_secs': 0.276987} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2129.470163] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Reconfigured VM instance instance-00000056 to attach disk [datastore1] 6b461482-0606-4af3-98a2-88c0318d1a69/6b461482-0606-4af3-98a2-88c0318d1a69.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2129.470893] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c4d005e6-fd03-4916-9910-006afd68bc38 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.478120] env[62684]: DEBUG oslo_vmware.api [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Waiting for the task: (returnval){ [ 2129.478120] env[62684]: value = "task-2053361" [ 2129.478120] env[62684]: _type = "Task" [ 2129.478120] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2129.488035] env[62684]: DEBUG oslo_vmware.api [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Task: {'id': task-2053361, 'name': Rename_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.525944] env[62684]: DEBUG nova.compute.manager [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2129.557331] env[62684]: DEBUG nova.virt.hardware [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2129.557693] env[62684]: DEBUG nova.virt.hardware [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2129.557865] env[62684]: DEBUG nova.virt.hardware [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2129.558170] env[62684]: DEBUG nova.virt.hardware [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2129.558397] env[62684]: DEBUG nova.virt.hardware [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2129.558617] env[62684]: DEBUG nova.virt.hardware [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2129.558875] env[62684]: DEBUG nova.virt.hardware [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2129.559127] env[62684]: DEBUG nova.virt.hardware [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2129.559356] env[62684]: DEBUG nova.virt.hardware [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2129.559625] env[62684]: DEBUG nova.virt.hardware [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2129.559906] env[62684]: DEBUG nova.virt.hardware [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2129.561659] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da2b07c-b921-4f49-8687-263e498cffd7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.571132] env[62684]: DEBUG oslo_vmware.api [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5274405b-cfd5-8be2-2760-1a647b4e0995, 'name': SearchDatastore_Task, 'duration_secs': 0.009918} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2129.574119] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9a4db49-73c0-492b-9e34-15b9a6ef2fb0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.577717] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ba6cf4-4953-482e-b412-2f13f981d1b7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.585931] env[62684]: DEBUG oslo_vmware.api [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2129.585931] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d0351c-827e-fe65-ea54-1cc0c906152a" [ 2129.585931] env[62684]: _type = "Task" [ 2129.585931] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2129.603653] env[62684]: DEBUG oslo_vmware.api [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d0351c-827e-fe65-ea54-1cc0c906152a, 'name': SearchDatastore_Task, 'duration_secs': 0.009305} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2129.603961] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2129.604265] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 3a967adf-8c46-4787-b1d1-4ed701399576/3931321c-cb4c-4b87-8d3a-50e05ea01db2-rescue.vmdk. {{(pid=62684) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 2129.604597] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f4328f82-dce6-427c-a5ad-ee4b3b8688b3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.613276] env[62684]: DEBUG oslo_vmware.api [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2129.613276] env[62684]: value = "task-2053362" [ 2129.613276] env[62684]: _type = "Task" [ 2129.613276] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2129.621865] env[62684]: DEBUG oslo_vmware.api [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053362, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.677817] env[62684]: DEBUG oslo_vmware.api [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053360, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.800629] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Acquiring lock "2eab4a07-9b92-436e-b4f8-fa64ae949b56" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2129.800892] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Lock "2eab4a07-9b92-436e-b4f8-fa64ae949b56" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2129.801093] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Acquiring lock "2eab4a07-9b92-436e-b4f8-fa64ae949b56-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2129.801302] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Lock "2eab4a07-9b92-436e-b4f8-fa64ae949b56-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2129.801530] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Lock "2eab4a07-9b92-436e-b4f8-fa64ae949b56-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2129.804093] env[62684]: INFO nova.compute.manager [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Terminating instance [ 2129.806670] env[62684]: DEBUG nova.compute.manager [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2129.806910] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2129.807874] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f72013c-0919-4704-ae7b-4f67fd682080 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.817999] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2129.818355] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2ea6ce4d-6483-452a-92e3-a68449beb80b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.825366] env[62684]: DEBUG oslo_vmware.api [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Waiting for the task: (returnval){ [ 2129.825366] env[62684]: value = "task-2053363" [ 2129.825366] env[62684]: _type = "Task" [ 2129.825366] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2129.834617] env[62684]: DEBUG oslo_vmware.api [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Task: {'id': task-2053363, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.939073] env[62684]: DEBUG oslo_concurrency.lockutils [None req-34f28f61-e12c-4603-a1d1-a6950299c417 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "8cc68353-4678-4ee7-8c0d-3d71e6bf05bf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.809s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2129.995400] env[62684]: DEBUG oslo_vmware.api [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Task: {'id': task-2053361, 'name': Rename_Task, 'duration_secs': 0.146577} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2129.999092] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2130.000153] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8ac47542-1f87-4937-94e2-c0e9c316baf4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.008417] env[62684]: DEBUG oslo_vmware.api [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Waiting for the task: (returnval){ [ 2130.008417] env[62684]: value = "task-2053364" [ 2130.008417] env[62684]: _type = "Task" [ 2130.008417] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2130.020908] env[62684]: DEBUG oslo_vmware.api [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Task: {'id': task-2053364, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.068903] env[62684]: DEBUG nova.compute.manager [req-afecd936-ea92-4dfb-bcbf-d7a658be8cc1 req-6ff4aa89-d455-41f8-bf91-4cd91711d792 service nova] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Received event network-vif-plugged-f121aad3-8e11-4583-8919-c502deebb5e2 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2130.069161] env[62684]: DEBUG oslo_concurrency.lockutils [req-afecd936-ea92-4dfb-bcbf-d7a658be8cc1 req-6ff4aa89-d455-41f8-bf91-4cd91711d792 service nova] Acquiring lock "0a8d7c48-cf90-4baf-a900-38fbd62869a6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2130.069498] env[62684]: DEBUG oslo_concurrency.lockutils [req-afecd936-ea92-4dfb-bcbf-d7a658be8cc1 req-6ff4aa89-d455-41f8-bf91-4cd91711d792 service nova] Lock "0a8d7c48-cf90-4baf-a900-38fbd62869a6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2130.069642] env[62684]: DEBUG oslo_concurrency.lockutils [req-afecd936-ea92-4dfb-bcbf-d7a658be8cc1 req-6ff4aa89-d455-41f8-bf91-4cd91711d792 service nova] Lock "0a8d7c48-cf90-4baf-a900-38fbd62869a6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2130.069840] env[62684]: DEBUG nova.compute.manager [req-afecd936-ea92-4dfb-bcbf-d7a658be8cc1 req-6ff4aa89-d455-41f8-bf91-4cd91711d792 service nova] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] No waiting events found dispatching network-vif-plugged-f121aad3-8e11-4583-8919-c502deebb5e2 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2130.070360] env[62684]: WARNING nova.compute.manager 
[req-afecd936-ea92-4dfb-bcbf-d7a658be8cc1 req-6ff4aa89-d455-41f8-bf91-4cd91711d792 service nova] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Received unexpected event network-vif-plugged-f121aad3-8e11-4583-8919-c502deebb5e2 for instance with vm_state building and task_state spawning. [ 2130.131068] env[62684]: DEBUG oslo_vmware.api [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053362, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458247} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2130.131390] env[62684]: INFO nova.virt.vmwareapi.ds_util [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 3a967adf-8c46-4787-b1d1-4ed701399576/3931321c-cb4c-4b87-8d3a-50e05ea01db2-rescue.vmdk. [ 2130.134037] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b82f97c-4c7a-4644-b971-c5e5b9326707 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.166940] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] 3a967adf-8c46-4787-b1d1-4ed701399576/3931321c-cb4c-4b87-8d3a-50e05ea01db2-rescue.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2130.167343] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f4c646c-165a-4f92-aadf-66bb1e05d426 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.184937] env[62684]: DEBUG nova.network.neutron [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Successfully updated port: f121aad3-8e11-4583-8919-c502deebb5e2 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2130.195909] env[62684]: DEBUG oslo_vmware.api [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053360, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.197324] env[62684]: DEBUG oslo_vmware.api [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2130.197324] env[62684]: value = "task-2053365" [ 2130.197324] env[62684]: _type = "Task" [ 2130.197324] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2130.210163] env[62684]: DEBUG oslo_vmware.api [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053365, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.227190] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a1b106-799c-4c09-8ffe-87ec16255a6b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.235017] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a091e05-1512-4e0b-9fc2-ef2efb41e90f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.266518] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bcb462f-9179-48da-a356-b6ac1ce3ef72 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.275513] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a300bf9-55bb-4c9e-8039-16bb6ee98514 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.290583] env[62684]: DEBUG nova.compute.provider_tree [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2130.335831] env[62684]: DEBUG oslo_vmware.api [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Task: {'id': task-2053363, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.518921] env[62684]: DEBUG oslo_vmware.api [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Task: {'id': task-2053364, 'name': PowerOnVM_Task, 'duration_secs': 0.488411} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2130.519201] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2130.519247] env[62684]: INFO nova.compute.manager [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Took 8.06 seconds to spawn the instance on the hypervisor. 
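The PowerOnVM_Task, Rename_Task and CopyVirtualDisk_Task entries above all follow the same oslo.vmware pattern: a vSphere task is started through invoke_api() and then polled by wait_for_task()/_poll_task until it reports completion (the "progress is N%" and "completed successfully" lines). A minimal sketch of that pattern follows; the endpoint, credentials and vm_ref are placeholders, not values taken from this run.

# Sketch of the invoke/wait pattern traced in the log; endpoint,
# credentials and vm_ref are hypothetical placeholders.
from oslo_vmware import api as vmware_api


def make_session():
    # task_poll_interval controls how often the pending task is re-polled.
    return vmware_api.VMwareAPISession(
        'vcenter.example.org', 'svc-user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)


def power_on(session, vm_ref):
    # Start VirtualMachine.PowerOnVM_Task on the vCenter side ...
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # ... then block while oslo.vmware polls the task to completion.
    return session.wait_for_task(task)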
[ 2130.519423] env[62684]: DEBUG nova.compute.manager [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2130.520237] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc16f6a8-4193-4cbb-bacc-d64c24d05573 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.643998] env[62684]: DEBUG oslo_concurrency.lockutils [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "aebbc2cc-8973-4907-9ec8-085027fd7ca3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2130.644343] env[62684]: DEBUG oslo_concurrency.lockutils [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "aebbc2cc-8973-4907-9ec8-085027fd7ca3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2130.687487] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "refresh_cache-0a8d7c48-cf90-4baf-a900-38fbd62869a6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2130.687722] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired lock "refresh_cache-0a8d7c48-cf90-4baf-a900-38fbd62869a6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2130.687997] env[62684]: DEBUG nova.network.neutron [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2130.703029] env[62684]: DEBUG oslo_vmware.api [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053360, 'name': CloneVM_Task} progress is 95%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.715537] env[62684]: DEBUG oslo_vmware.api [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053365, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.794493] env[62684]: DEBUG nova.scheduler.client.report [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2130.840574] env[62684]: DEBUG oslo_vmware.api [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Task: {'id': task-2053363, 'name': PowerOffVM_Task, 'duration_secs': 0.569795} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2130.840970] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2130.841249] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2130.841613] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0edea1a1-408d-4066-9c40-79b3b829c0e5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.037306] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2131.037542] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2131.037807] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Deleting the datastore file [datastore2] 2eab4a07-9b92-436e-b4f8-fa64ae949b56 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2131.039954] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1abe309-ee17-4a9d-94f4-f64248f772ee {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.042467] env[62684]: INFO nova.compute.manager [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Took 26.74 seconds to build instance. [ 2131.048634] env[62684]: DEBUG oslo_vmware.api [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Waiting for the task: (returnval){ [ 2131.048634] env[62684]: value = "task-2053367" [ 2131.048634] env[62684]: _type = "Task" [ 2131.048634] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2131.057237] env[62684]: DEBUG oslo_vmware.api [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Task: {'id': task-2053367, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2131.147062] env[62684]: DEBUG nova.compute.manager [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2131.199864] env[62684]: DEBUG oslo_vmware.api [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053360, 'name': CloneVM_Task} progress is 95%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2131.210801] env[62684]: DEBUG oslo_vmware.api [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053365, 'name': ReconfigVM_Task, 'duration_secs': 0.732768} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2131.211392] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Reconfigured VM instance instance-00000054 to attach disk [datastore2] 3a967adf-8c46-4787-b1d1-4ed701399576/3931321c-cb4c-4b87-8d3a-50e05ea01db2-rescue.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2131.212242] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91d9a69d-ab0f-4345-a1e7-18862d6b6860 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.241720] env[62684]: DEBUG nova.network.neutron [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2131.243750] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6040c27f-a607-49a3-9c9a-1e291f45666e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.259532] env[62684]: DEBUG oslo_vmware.api [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2131.259532] env[62684]: value = "task-2053368" [ 2131.259532] env[62684]: _type = "Task" [ 2131.259532] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2131.270039] env[62684]: DEBUG oslo_vmware.api [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053368, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2131.299866] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.898s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2131.302058] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.994s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2131.303961] env[62684]: INFO nova.compute.claims [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2131.395418] env[62684]: DEBUG nova.network.neutron [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Updating instance_info_cache with network_info: [{"id": "f121aad3-8e11-4583-8919-c502deebb5e2", "address": "fa:16:3e:08:50:be", "network": {"id": "7678b347-6a54-4b84-9a4d-b566bbeb1ea4", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-51664912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d88ac878d44480b3b54b24ab87efa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 
595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf121aad3-8e", "ovs_interfaceid": "f121aad3-8e11-4583-8919-c502deebb5e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2131.457926] env[62684]: INFO nova.scheduler.client.report [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Deleted allocations for instance 28886f7c-6efc-4505-84f6-682d75cea215 [ 2131.544982] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cfb3c904-442b-4f6d-b983-44c421f4a871 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Lock "6b461482-0606-4af3-98a2-88c0318d1a69" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.248s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2131.558417] env[62684]: DEBUG oslo_vmware.api [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Task: {'id': task-2053367, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.347143} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2131.558593] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2131.558801] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2131.559220] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2131.559296] env[62684]: INFO nova.compute.manager [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Took 1.75 seconds to destroy the instance on the hypervisor. [ 2131.559538] env[62684]: DEBUG oslo.service.loopingcall [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2131.559739] env[62684]: DEBUG nova.compute.manager [-] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2131.559835] env[62684]: DEBUG nova.network.neutron [-] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2131.670140] env[62684]: DEBUG oslo_concurrency.lockutils [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2131.698349] env[62684]: DEBUG oslo_vmware.api [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053360, 'name': CloneVM_Task, 'duration_secs': 2.151501} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2131.698635] env[62684]: INFO nova.virt.vmwareapi.vmops [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Created linked-clone VM from snapshot [ 2131.699436] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4f62dd9-dec5-4d88-84a6-5fc314d8f142 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.708531] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Uploading image 0fb22426-fb87-45f8-abe5-a3064061a9f3 {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2131.724084] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Destroying the VM {{(pid=62684) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2131.724397] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-72254708-ef97-41f2-9850-50c76f4a65fe {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.731349] env[62684]: DEBUG oslo_vmware.api [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2131.731349] env[62684]: value = "task-2053369" [ 2131.731349] env[62684]: _type = "Task" [ 2131.731349] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2131.739641] env[62684]: DEBUG oslo_vmware.api [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053369, 'name': Destroy_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2131.769433] env[62684]: DEBUG oslo_vmware.api [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053368, 'name': ReconfigVM_Task, 'duration_secs': 0.165584} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2131.769713] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2131.770084] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a6e7777b-c4ea-4d60-9973-9cca1c4b8424 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.776161] env[62684]: DEBUG oslo_vmware.api [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2131.776161] env[62684]: value = "task-2053370" [ 2131.776161] env[62684]: _type = "Task" [ 2131.776161] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2131.783963] env[62684]: DEBUG oslo_vmware.api [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053370, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2131.900787] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Releasing lock "refresh_cache-0a8d7c48-cf90-4baf-a900-38fbd62869a6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2131.901192] env[62684]: DEBUG nova.compute.manager [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Instance network_info: |[{"id": "f121aad3-8e11-4583-8919-c502deebb5e2", "address": "fa:16:3e:08:50:be", "network": {"id": "7678b347-6a54-4b84-9a4d-b566bbeb1ea4", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-51664912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d88ac878d44480b3b54b24ab87efa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf121aad3-8e", "ovs_interfaceid": "f121aad3-8e11-4583-8919-c502deebb5e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2131.901788] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:50:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'de5fcb06-b0d0-467f-86fe-06882165ac31', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f121aad3-8e11-4583-8919-c502deebb5e2', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2131.914069] env[62684]: DEBUG oslo.service.loopingcall [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2131.914433] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2131.914776] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3ec1e323-9baf-4774-aa79-ba5d3e355eb2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.946139] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2131.946139] env[62684]: value = "task-2053371" [ 2131.946139] env[62684]: _type = "Task" [ 2131.946139] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2131.957252] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053371, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2131.966085] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5b595d12-5144-4cf8-9e3c-6cf06360aa3f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "28886f7c-6efc-4505-84f6-682d75cea215" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.870s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2132.111305] env[62684]: DEBUG nova.compute.manager [req-32e20b49-9f94-42f4-8593-187bdb5b6d32 req-86a3f695-985c-474b-8e32-e4c801e3fbc9 service nova] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Received event network-changed-f121aad3-8e11-4583-8919-c502deebb5e2 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2132.111636] env[62684]: DEBUG nova.compute.manager [req-32e20b49-9f94-42f4-8593-187bdb5b6d32 req-86a3f695-985c-474b-8e32-e4c801e3fbc9 service nova] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Refreshing instance network info cache due to event network-changed-f121aad3-8e11-4583-8919-c502deebb5e2. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2132.111955] env[62684]: DEBUG oslo_concurrency.lockutils [req-32e20b49-9f94-42f4-8593-187bdb5b6d32 req-86a3f695-985c-474b-8e32-e4c801e3fbc9 service nova] Acquiring lock "refresh_cache-0a8d7c48-cf90-4baf-a900-38fbd62869a6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2132.112218] env[62684]: DEBUG oslo_concurrency.lockutils [req-32e20b49-9f94-42f4-8593-187bdb5b6d32 req-86a3f695-985c-474b-8e32-e4c801e3fbc9 service nova] Acquired lock "refresh_cache-0a8d7c48-cf90-4baf-a900-38fbd62869a6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2132.112881] env[62684]: DEBUG nova.network.neutron [req-32e20b49-9f94-42f4-8593-187bdb5b6d32 req-86a3f695-985c-474b-8e32-e4c801e3fbc9 service nova] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Refreshing network info cache for port f121aad3-8e11-4583-8919-c502deebb5e2 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2132.234607] env[62684]: DEBUG nova.compute.manager [req-96e57a0b-b5e0-4146-9c69-77d26706beb2 req-6be79888-97c2-4e19-bf28-e3ccd78cd684 service nova] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Received event network-changed-77dcd033-4115-49dc-9ba1-8a05c4726df3 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2132.234736] env[62684]: DEBUG nova.compute.manager [req-96e57a0b-b5e0-4146-9c69-77d26706beb2 req-6be79888-97c2-4e19-bf28-e3ccd78cd684 service nova] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Refreshing instance network info cache due to event network-changed-77dcd033-4115-49dc-9ba1-8a05c4726df3. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2132.236114] env[62684]: DEBUG oslo_concurrency.lockutils [req-96e57a0b-b5e0-4146-9c69-77d26706beb2 req-6be79888-97c2-4e19-bf28-e3ccd78cd684 service nova] Acquiring lock "refresh_cache-6b461482-0606-4af3-98a2-88c0318d1a69" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2132.236273] env[62684]: DEBUG oslo_concurrency.lockutils [req-96e57a0b-b5e0-4146-9c69-77d26706beb2 req-6be79888-97c2-4e19-bf28-e3ccd78cd684 service nova] Acquired lock "refresh_cache-6b461482-0606-4af3-98a2-88c0318d1a69" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2132.236569] env[62684]: DEBUG nova.network.neutron [req-96e57a0b-b5e0-4146-9c69-77d26706beb2 req-6be79888-97c2-4e19-bf28-e3ccd78cd684 service nova] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Refreshing network info cache for port 77dcd033-4115-49dc-9ba1-8a05c4726df3 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2132.248698] env[62684]: DEBUG oslo_vmware.api [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053369, 'name': Destroy_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2132.286857] env[62684]: DEBUG oslo_vmware.api [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053370, 'name': PowerOnVM_Task, 'duration_secs': 0.421942} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2132.286857] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2132.289638] env[62684]: DEBUG nova.compute.manager [None req-b455c2bb-087e-4ff6-b732-b5dd24b40d76 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2132.290528] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-247a6e2c-04fd-4dbb-9af6-fe8a274e663b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.312637] env[62684]: DEBUG nova.network.neutron [-] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2132.456904] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053371, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2132.545887] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af9073a-d0d7-4613-b711-daf5eb134338 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.555053] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e859d8-127a-4648-b3ec-702d93469ed3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.586889] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94c6cb0a-9a1d-4526-83eb-f86cb3d499e2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.595066] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d483190-f6bd-4435-a837-2c11f70b7963 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.610802] env[62684]: DEBUG nova.compute.provider_tree [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2132.749106] env[62684]: DEBUG oslo_vmware.api [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053369, 'name': Destroy_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2132.818372] env[62684]: INFO nova.compute.manager [-] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Took 1.26 seconds to deallocate network for instance. 
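The recurring "Acquiring lock ...", "Lock ... acquired by ... :: waited" and "... \"released\" ... :: held" messages around the refresh_cache-<uuid> and compute_resources locks come from oslo.concurrency's lockutils helpers. Roughly, the call sites behind them look like the sketch below; the lock names and the work done under the locks are stand-ins, not the actual Nova code paths.

# Illustrative lockutils usage; callables and lock names are stand-ins.
from oslo_concurrency import lockutils


def refresh_network_cache(instance_uuid, rebuild_cache):
    # Context-manager form, as used for the per-instance
    # refresh_cache-<uuid> lock seen above.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        return rebuild_cache()


# Decorator form; it produces the same "acquired by ... :: waited Ns"
# and '"released" ... :: held Ns' debug lines seen for compute_resources.
@lockutils.synchronized('compute_resources')
def claim_resources(do_claim):
    return do_claim()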
[ 2132.849442] env[62684]: DEBUG nova.network.neutron [req-32e20b49-9f94-42f4-8593-187bdb5b6d32 req-86a3f695-985c-474b-8e32-e4c801e3fbc9 service nova] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Updated VIF entry in instance network info cache for port f121aad3-8e11-4583-8919-c502deebb5e2. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2132.849793] env[62684]: DEBUG nova.network.neutron [req-32e20b49-9f94-42f4-8593-187bdb5b6d32 req-86a3f695-985c-474b-8e32-e4c801e3fbc9 service nova] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Updating instance_info_cache with network_info: [{"id": "f121aad3-8e11-4583-8919-c502deebb5e2", "address": "fa:16:3e:08:50:be", "network": {"id": "7678b347-6a54-4b84-9a4d-b566bbeb1ea4", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-51664912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d88ac878d44480b3b54b24ab87efa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf121aad3-8e", "ovs_interfaceid": "f121aad3-8e11-4583-8919-c502deebb5e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2132.957554] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053371, 'name': CreateVM_Task, 'duration_secs': 0.72556} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2132.957712] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2132.958549] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2132.958714] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2132.959599] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2132.959599] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e72829f8-9955-45b6-ab96-a0660f4a44ca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.969041] env[62684]: DEBUG oslo_vmware.api [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2132.969041] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5257d6b0-7be9-e299-39d4-e5da8255a1fd" [ 2132.969041] env[62684]: _type = "Task" [ 2132.969041] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2132.975499] env[62684]: DEBUG oslo_vmware.api [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5257d6b0-7be9-e299-39d4-e5da8255a1fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.022932] env[62684]: DEBUG nova.network.neutron [req-96e57a0b-b5e0-4146-9c69-77d26706beb2 req-6be79888-97c2-4e19-bf28-e3ccd78cd684 service nova] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Updated VIF entry in instance network info cache for port 77dcd033-4115-49dc-9ba1-8a05c4726df3. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2133.023328] env[62684]: DEBUG nova.network.neutron [req-96e57a0b-b5e0-4146-9c69-77d26706beb2 req-6be79888-97c2-4e19-bf28-e3ccd78cd684 service nova] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Updating instance_info_cache with network_info: [{"id": "77dcd033-4115-49dc-9ba1-8a05c4726df3", "address": "fa:16:3e:86:e6:b6", "network": {"id": "fa25d889-412a-475c-9ba5-a52e6e6e1a5f", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-19235775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.134", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b8afd480acb84db283976c13a9396c9d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77dcd033-41", "ovs_interfaceid": "77dcd033-4115-49dc-9ba1-8a05c4726df3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2133.114597] env[62684]: DEBUG nova.scheduler.client.report [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2133.246395] env[62684]: DEBUG oslo_vmware.api [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053369, 'name': Destroy_Task} progress is 100%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.324218] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2133.352280] env[62684]: DEBUG oslo_concurrency.lockutils [req-32e20b49-9f94-42f4-8593-187bdb5b6d32 req-86a3f695-985c-474b-8e32-e4c801e3fbc9 service nova] Releasing lock "refresh_cache-0a8d7c48-cf90-4baf-a900-38fbd62869a6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2133.353224] env[62684]: DEBUG nova.compute.manager [req-32e20b49-9f94-42f4-8593-187bdb5b6d32 req-86a3f695-985c-474b-8e32-e4c801e3fbc9 service nova] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Received event network-vif-deleted-f01f26d4-f4bb-4d9a-b6dd-3451ffb353bb {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2133.353224] env[62684]: INFO nova.compute.manager [req-32e20b49-9f94-42f4-8593-187bdb5b6d32 req-86a3f695-985c-474b-8e32-e4c801e3fbc9 service nova] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Neutron deleted interface f01f26d4-f4bb-4d9a-b6dd-3451ffb353bb; detaching it from the instance and deleting it from the info cache [ 2133.353224] env[62684]: DEBUG nova.network.neutron [req-32e20b49-9f94-42f4-8593-187bdb5b6d32 req-86a3f695-985c-474b-8e32-e4c801e3fbc9 service nova] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2133.478368] env[62684]: DEBUG oslo_vmware.api [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5257d6b0-7be9-e299-39d4-e5da8255a1fd, 'name': SearchDatastore_Task, 'duration_secs': 0.012345} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2133.478368] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2133.478368] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2133.478368] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2133.478710] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2133.478710] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2133.479917] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2ac51fff-5a00-4cc7-8c59-cae9f893b160 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.491210] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2133.491210] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2133.491906] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8af14aa-9728-4086-a655-792a40b4ca69 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.499645] env[62684]: DEBUG oslo_vmware.api [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2133.499645] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5283b17f-828f-062f-679f-8e3bddcc6023" [ 2133.499645] env[62684]: _type = "Task" [ 2133.499645] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2133.508241] env[62684]: DEBUG oslo_vmware.api [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5283b17f-828f-062f-679f-8e3bddcc6023, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.527114] env[62684]: DEBUG oslo_concurrency.lockutils [req-96e57a0b-b5e0-4146-9c69-77d26706beb2 req-6be79888-97c2-4e19-bf28-e3ccd78cd684 service nova] Releasing lock "refresh_cache-6b461482-0606-4af3-98a2-88c0318d1a69" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2133.621071] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.319s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2133.621696] env[62684]: DEBUG nova.compute.manager [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2133.624649] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.896s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2133.626117] env[62684]: INFO nova.compute.claims [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2133.746719] env[62684]: DEBUG oslo_vmware.api [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053369, 'name': Destroy_Task, 'duration_secs': 1.521953} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2133.746995] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Destroyed the VM [ 2133.747278] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Deleting Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2133.747550] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4db9ea3b-7e76-4e23-a382-05591b58f87f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.755203] env[62684]: DEBUG oslo_vmware.api [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2133.755203] env[62684]: value = "task-2053372" [ 2133.755203] env[62684]: _type = "Task" [ 2133.755203] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2133.765653] env[62684]: DEBUG oslo_vmware.api [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053372, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.856326] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e769d72d-08eb-4d6e-9e9e-924cbf4de177 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.865612] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc9270a2-c8fc-4f00-9f10-9057a9e8f7e4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.897544] env[62684]: DEBUG nova.compute.manager [req-32e20b49-9f94-42f4-8593-187bdb5b6d32 req-86a3f695-985c-474b-8e32-e4c801e3fbc9 service nova] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Detach interface failed, port_id=f01f26d4-f4bb-4d9a-b6dd-3451ffb353bb, reason: Instance 2eab4a07-9b92-436e-b4f8-fa64ae949b56 could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2134.011048] env[62684]: DEBUG oslo_vmware.api [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5283b17f-828f-062f-679f-8e3bddcc6023, 'name': SearchDatastore_Task, 'duration_secs': 0.02095} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2134.011740] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a6d4189-593e-4ca6-b5d2-2301af5db2f1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.017480] env[62684]: DEBUG oslo_vmware.api [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2134.017480] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5287485b-73c1-d4a1-4bda-6d0c322f1e19" [ 2134.017480] env[62684]: _type = "Task" [ 2134.017480] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2134.025982] env[62684]: DEBUG oslo_vmware.api [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5287485b-73c1-d4a1-4bda-6d0c322f1e19, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2134.071997] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquiring lock "7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2134.072264] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Lock "7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2134.131340] env[62684]: DEBUG nova.compute.utils [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2134.135702] env[62684]: DEBUG nova.compute.manager [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2134.135921] env[62684]: DEBUG nova.network.neutron [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2134.191479] env[62684]: DEBUG nova.policy [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e957449ae9d24bdaba38b3db704d3d61', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5cb4900a999e467bafdfd1fb407a82f4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2134.266122] env[62684]: DEBUG oslo_vmware.api [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053372, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2134.485771] env[62684]: DEBUG nova.network.neutron [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Successfully created port: b5747949-00d7-4815-9080-52285a6a8813 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2134.529212] env[62684]: DEBUG oslo_vmware.api [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5287485b-73c1-d4a1-4bda-6d0c322f1e19, 'name': SearchDatastore_Task, 'duration_secs': 0.013147} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2134.529588] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2134.529874] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 0a8d7c48-cf90-4baf-a900-38fbd62869a6/0a8d7c48-cf90-4baf-a900-38fbd62869a6.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2134.530238] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c8ff5df7-c5cb-4952-804f-71ceaa3e2ae4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.537844] env[62684]: DEBUG oslo_vmware.api [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2134.537844] env[62684]: value = "task-2053373" [ 2134.537844] env[62684]: _type = "Task" [ 2134.537844] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2134.547460] env[62684]: DEBUG oslo_vmware.api [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053373, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2134.576673] env[62684]: DEBUG nova.compute.manager [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2134.639023] env[62684]: DEBUG nova.compute.manager [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2134.765552] env[62684]: DEBUG oslo_vmware.api [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053372, 'name': RemoveSnapshot_Task, 'duration_secs': 0.918405} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2134.765874] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Deleted Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2134.891681] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e92833-bb08-43e7-8ed3-002c383da08b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.901033] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501fe4c3-8088-45bc-a92c-b403690644d4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.933611] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c03a599c-f6b8-4251-94d7-af4f75996343 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.942068] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af86d3f-58b1-4a64-8794-89641976bbab {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.956918] env[62684]: DEBUG nova.compute.provider_tree [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2135.048198] env[62684]: DEBUG oslo_vmware.api [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053373, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2135.095216] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2135.273189] env[62684]: WARNING nova.compute.manager [None req-127ff983-8e2f-41be-825a-333b4fe805bd tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Image not found during snapshot: nova.exception.ImageNotFound: Image 0fb22426-fb87-45f8-abe5-a3064061a9f3 could not be found. 
[ 2135.461194] env[62684]: DEBUG nova.scheduler.client.report [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2135.551244] env[62684]: DEBUG oslo_vmware.api [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053373, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.77099} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2135.551580] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 0a8d7c48-cf90-4baf-a900-38fbd62869a6/0a8d7c48-cf90-4baf-a900-38fbd62869a6.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2135.551808] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2135.552096] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-882be73f-bbf3-47af-bfb1-2a7b72e8e8b5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.558695] env[62684]: DEBUG oslo_vmware.api [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2135.558695] env[62684]: value = "task-2053374" [ 2135.558695] env[62684]: _type = "Task" [ 2135.558695] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2135.568233] env[62684]: DEBUG oslo_vmware.api [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053374, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2135.651295] env[62684]: DEBUG nova.compute.manager [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2135.676474] env[62684]: DEBUG nova.virt.hardware [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2135.676731] env[62684]: DEBUG nova.virt.hardware [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2135.676894] env[62684]: DEBUG nova.virt.hardware [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2135.677103] env[62684]: DEBUG nova.virt.hardware [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2135.677261] env[62684]: DEBUG nova.virt.hardware [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2135.677411] env[62684]: DEBUG nova.virt.hardware [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2135.677617] env[62684]: DEBUG nova.virt.hardware [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2135.677779] env[62684]: DEBUG nova.virt.hardware [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2135.677948] env[62684]: DEBUG 
nova.virt.hardware [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2135.678171] env[62684]: DEBUG nova.virt.hardware [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2135.678418] env[62684]: DEBUG nova.virt.hardware [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2135.679363] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22556164-0760-4467-ae09-c28ca5c57a27 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.688706] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16de56da-4bbc-4dd7-9498-ccd1184d5f65 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.765354] env[62684]: DEBUG oslo_concurrency.lockutils [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquiring lock "e8c90faa-2c25-4308-9781-80d308b9211c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2135.765601] env[62684]: DEBUG oslo_concurrency.lockutils [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "e8c90faa-2c25-4308-9781-80d308b9211c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2135.765860] env[62684]: DEBUG oslo_concurrency.lockutils [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquiring lock "e8c90faa-2c25-4308-9781-80d308b9211c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2135.766017] env[62684]: DEBUG oslo_concurrency.lockutils [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "e8c90faa-2c25-4308-9781-80d308b9211c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2135.766195] env[62684]: DEBUG oslo_concurrency.lockutils [None 
req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "e8c90faa-2c25-4308-9781-80d308b9211c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2135.768235] env[62684]: INFO nova.compute.manager [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Terminating instance [ 2135.770253] env[62684]: DEBUG nova.compute.manager [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2135.770611] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2135.772059] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fabd289-4280-45ff-84c8-0e4fda58fc82 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.779693] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2135.779932] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eaaafe02-07d4-4042-a5c8-d426335b071c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.786481] env[62684]: DEBUG oslo_vmware.api [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2135.786481] env[62684]: value = "task-2053375" [ 2135.786481] env[62684]: _type = "Task" [ 2135.786481] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2135.794102] env[62684]: DEBUG oslo_vmware.api [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053375, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2135.969021] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.342s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2135.969021] env[62684]: DEBUG nova.compute.manager [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2135.972271] env[62684]: DEBUG oslo_concurrency.lockutils [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.302s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2135.975715] env[62684]: INFO nova.compute.claims [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2136.037409] env[62684]: DEBUG nova.compute.manager [req-f75fca32-0366-4060-b010-4a29cc82a262 req-e6bcbb37-8428-4cd0-9327-a630abe5142e service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Received event network-vif-plugged-b5747949-00d7-4815-9080-52285a6a8813 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2136.037655] env[62684]: DEBUG oslo_concurrency.lockutils [req-f75fca32-0366-4060-b010-4a29cc82a262 req-e6bcbb37-8428-4cd0-9327-a630abe5142e service nova] Acquiring lock "f037d6b2-2082-4611-985e-b9a077eb8250-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2136.037869] env[62684]: DEBUG oslo_concurrency.lockutils [req-f75fca32-0366-4060-b010-4a29cc82a262 req-e6bcbb37-8428-4cd0-9327-a630abe5142e service nova] Lock "f037d6b2-2082-4611-985e-b9a077eb8250-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2136.038385] env[62684]: DEBUG oslo_concurrency.lockutils [req-f75fca32-0366-4060-b010-4a29cc82a262 req-e6bcbb37-8428-4cd0-9327-a630abe5142e service nova] Lock "f037d6b2-2082-4611-985e-b9a077eb8250-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2136.038632] env[62684]: DEBUG nova.compute.manager [req-f75fca32-0366-4060-b010-4a29cc82a262 req-e6bcbb37-8428-4cd0-9327-a630abe5142e service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] No waiting events found dispatching network-vif-plugged-b5747949-00d7-4815-9080-52285a6a8813 {{(pid=62684) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 2136.039270] env[62684]: WARNING nova.compute.manager [req-f75fca32-0366-4060-b010-4a29cc82a262 req-e6bcbb37-8428-4cd0-9327-a630abe5142e service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Received unexpected event network-vif-plugged-b5747949-00d7-4815-9080-52285a6a8813 for instance with vm_state building and task_state spawning. [ 2136.068975] env[62684]: DEBUG oslo_vmware.api [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053374, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.269921} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2136.069303] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2136.070107] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c6444e-bd27-45f7-9c6d-2e6720ecb971 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.092653] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] 0a8d7c48-cf90-4baf-a900-38fbd62869a6/0a8d7c48-cf90-4baf-a900-38fbd62869a6.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2136.092914] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11d6ae89-0569-46d4-9352-4c3694b78386 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.113194] env[62684]: DEBUG oslo_vmware.api [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2136.113194] env[62684]: value = "task-2053376" [ 2136.113194] env[62684]: _type = "Task" [ 2136.113194] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2136.124427] env[62684]: DEBUG oslo_vmware.api [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053376, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.177076] env[62684]: DEBUG nova.network.neutron [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Successfully updated port: b5747949-00d7-4815-9080-52285a6a8813 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2136.296152] env[62684]: DEBUG oslo_vmware.api [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053375, 'name': PowerOffVM_Task, 'duration_secs': 0.43142} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2136.296393] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2136.296572] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2136.296830] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-adb8f351-3123-4b5a-95f1-4370b67242e4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.489635] env[62684]: DEBUG nova.compute.utils [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2136.494045] env[62684]: DEBUG nova.compute.manager [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2136.494045] env[62684]: DEBUG nova.network.neutron [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2136.547212] env[62684]: DEBUG nova.policy [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6e8b54745b53458eafe4d911d7d6d7d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c54f74085f343d2b790145b0d82a9f8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2136.624077] env[62684]: DEBUG oslo_vmware.api [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053376, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.680411] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "refresh_cache-f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2136.680580] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "refresh_cache-f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2136.680739] env[62684]: DEBUG nova.network.neutron [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2136.820947] env[62684]: DEBUG nova.network.neutron [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Successfully created port: 866e13c3-fdd0-4c15-97db-5002d50eabeb {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2136.997235] env[62684]: DEBUG nova.compute.manager [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2137.126463] env[62684]: DEBUG oslo_vmware.api [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053376, 'name': ReconfigVM_Task, 'duration_secs': 0.617283} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2137.131158] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Reconfigured VM instance instance-00000057 to attach disk [datastore1] 0a8d7c48-cf90-4baf-a900-38fbd62869a6/0a8d7c48-cf90-4baf-a900-38fbd62869a6.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2137.132259] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6cdba151-2b8e-4fd6-9c8b-c2a90a152278 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.139629] env[62684]: DEBUG oslo_vmware.api [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2137.139629] env[62684]: value = "task-2053378" [ 2137.139629] env[62684]: _type = "Task" [ 2137.139629] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2137.152773] env[62684]: DEBUG oslo_vmware.api [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053378, 'name': Rename_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2137.220575] env[62684]: DEBUG nova.network.neutron [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2137.262171] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aa0cdfd-7a4c-41c5-8cf5-e532ed66dc43 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.269914] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdf55f5c-913d-4c93-8643-87b7e6ba63e7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.305038] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-486730d9-a494-4c13-90db-6f80494b19c8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.313081] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf0ce1e-5a2a-44a3-ab87-e1a1753f6f19 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.327186] env[62684]: DEBUG nova.compute.provider_tree [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2137.382440] env[62684]: DEBUG nova.network.neutron [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Updating instance_info_cache with network_info: [{"id": "b5747949-00d7-4815-9080-52285a6a8813", "address": "fa:16:3e:fd:34:0c", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5747949-00", "ovs_interfaceid": "b5747949-00d7-4815-9080-52285a6a8813", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2137.650257] env[62684]: DEBUG oslo_vmware.api [None 
req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053378, 'name': Rename_Task, 'duration_secs': 0.261116} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2137.650562] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2137.650810] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-029108eb-cd1a-418d-8bc3-549698d70002 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.657364] env[62684]: DEBUG oslo_vmware.api [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2137.657364] env[62684]: value = "task-2053379" [ 2137.657364] env[62684]: _type = "Task" [ 2137.657364] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2137.665155] env[62684]: DEBUG oslo_vmware.api [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053379, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2137.848994] env[62684]: ERROR nova.scheduler.client.report [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [req-d7171e80-513e-4706-b4ec-ae548dec624e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d7171e80-513e-4706-b4ec-ae548dec624e"}]} [ 2137.867234] env[62684]: DEBUG nova.scheduler.client.report [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2137.882430] env[62684]: DEBUG nova.scheduler.client.report [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2137.882684] env[62684]: DEBUG nova.compute.provider_tree [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2137.884980] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "refresh_cache-f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2137.885298] env[62684]: DEBUG nova.compute.manager [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Instance network_info: |[{"id": "b5747949-00d7-4815-9080-52285a6a8813", "address": "fa:16:3e:fd:34:0c", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5747949-00", "ovs_interfaceid": "b5747949-00d7-4815-9080-52285a6a8813", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2137.885894] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:34:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f880ac2e-d532-4f54-87bb-998a8d1bca78', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b5747949-00d7-4815-9080-52285a6a8813', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2137.893631] env[62684]: DEBUG oslo.service.loopingcall [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2137.893871] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2137.894131] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-427bbf72-d2c3-482e-b217-60a488d3a437 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.909914] env[62684]: DEBUG nova.scheduler.client.report [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2137.916558] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2137.916558] env[62684]: value = "task-2053380" [ 2137.916558] env[62684]: _type = "Task" [ 2137.916558] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2137.924149] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053380, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2137.930466] env[62684]: DEBUG nova.scheduler.client.report [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2138.007353] env[62684]: DEBUG nova.compute.manager [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2138.039547] env[62684]: DEBUG nova.virt.hardware [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2138.039948] env[62684]: DEBUG nova.virt.hardware [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2138.040264] env[62684]: DEBUG nova.virt.hardware [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2138.040604] env[62684]: DEBUG nova.virt.hardware [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2138.041016] env[62684]: DEBUG nova.virt.hardware [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2138.041201] env[62684]: DEBUG nova.virt.hardware [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 2138.041572] env[62684]: DEBUG nova.virt.hardware [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2138.041866] env[62684]: DEBUG nova.virt.hardware [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2138.042194] env[62684]: DEBUG nova.virt.hardware [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2138.042512] env[62684]: DEBUG nova.virt.hardware [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2138.042831] env[62684]: DEBUG nova.virt.hardware [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2138.044225] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51930b42-ebcf-4726-bac1-29c5ee7d24c8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.059180] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc3ca21-d2de-4922-9c1b-3f945a6fca82 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.071766] env[62684]: DEBUG nova.compute.manager [req-1c63aa28-e12a-4aa8-9281-080685aa8521 req-1a3108a6-29ea-4f7f-9eec-45bf8424c5d5 service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Received event network-changed-b5747949-00d7-4815-9080-52285a6a8813 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2138.072112] env[62684]: DEBUG nova.compute.manager [req-1c63aa28-e12a-4aa8-9281-080685aa8521 req-1a3108a6-29ea-4f7f-9eec-45bf8424c5d5 service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Refreshing instance network info cache due to event network-changed-b5747949-00d7-4815-9080-52285a6a8813. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2138.072549] env[62684]: DEBUG oslo_concurrency.lockutils [req-1c63aa28-e12a-4aa8-9281-080685aa8521 req-1a3108a6-29ea-4f7f-9eec-45bf8424c5d5 service nova] Acquiring lock "refresh_cache-f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2138.072732] env[62684]: DEBUG oslo_concurrency.lockutils [req-1c63aa28-e12a-4aa8-9281-080685aa8521 req-1a3108a6-29ea-4f7f-9eec-45bf8424c5d5 service nova] Acquired lock "refresh_cache-f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2138.072997] env[62684]: DEBUG nova.network.neutron [req-1c63aa28-e12a-4aa8-9281-080685aa8521 req-1a3108a6-29ea-4f7f-9eec-45bf8424c5d5 service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Refreshing network info cache for port b5747949-00d7-4815-9080-52285a6a8813 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2138.171565] env[62684]: DEBUG oslo_vmware.api [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053379, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2138.196038] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b4ab8fc7-785d-48ab-ada3-15723132742f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "57537508-06e7-43a4-95c5-c4399b8bf93f" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2138.196336] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b4ab8fc7-785d-48ab-ada3-15723132742f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "57537508-06e7-43a4-95c5-c4399b8bf93f" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2138.196566] env[62684]: INFO nova.compute.manager [None req-b4ab8fc7-785d-48ab-ada3-15723132742f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Rebooting instance [ 2138.203741] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7273706-277b-4e91-ae1c-8974d917aa06 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.212098] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-423a1288-8230-4ebf-9106-142b2bbacb4f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.244345] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a219a91e-917a-4eaa-b65c-0acdd8dfed28 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.252294] env[62684]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c13a0f6-63dc-4ac4-a611-8520a50943fd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.266524] env[62684]: DEBUG nova.compute.provider_tree [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2138.426324] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053380, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2138.667555] env[62684]: DEBUG oslo_vmware.api [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053379, 'name': PowerOnVM_Task, 'duration_secs': 0.748819} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2138.669882] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2138.670118] env[62684]: INFO nova.compute.manager [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Took 9.14 seconds to spawn the instance on the hypervisor. 
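The records above show PowerOnVM_Task (task-2053379) and CreateVM_Task (task-2053380) being driven through oslo_vmware's wait_for_task loop: the API is polled, the progress percentage is logged, and a final poll reports "completed successfully" with a duration_secs. Below is a minimal sketch of that poll-until-done pattern; it is not the oslo.vmware implementation, and poll_task, interval and timeout are hypothetical names used only for illustration.

    # Minimal sketch of the poll-until-done pattern seen for task-2053379 and
    # task-2053380 above. NOT the oslo.vmware code; poll_task, interval and
    # timeout are hypothetical names chosen for illustration.
    import time


    class TaskTimeout(Exception):
        """Raised when a task does not finish within the allotted time."""


    def wait_for_task(poll_task, interval=0.5, timeout=300.0):
        """Poll poll_task() until it reports completion.

        poll_task is assumed to return a dict such as
        {'state': 'running', 'progress': 25},
        {'state': 'success', 'duration_secs': 5.59}, or
        {'state': 'error', 'message': '...'}.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = poll_task()
            state = info.get('state')
            if state == 'success':
                return info                      # e.g. carries duration_secs
            if state == 'error':
                raise RuntimeError(info.get('message', 'task failed'))
            # Still running: report progress and sleep, mirroring the
            # "... progress is 25%" lines in the log above.
            print("task progress is %s%%" % info.get('progress', 0))
            time.sleep(interval)
        raise TaskTimeout('task did not complete within %.0fs' % timeout)


    if __name__ == '__main__':
        # Fake task that finishes on the third poll, just to exercise the loop.
        _polls = iter([{'state': 'running', 'progress': 0},
                       {'state': 'running', 'progress': 66},
                       {'state': 'success', 'duration_secs': 0.75}])
        print(wait_for_task(lambda: next(_polls), interval=0.01))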
[ 2138.670315] env[62684]: DEBUG nova.compute.manager [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2138.671157] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a08eb7a-3517-4f93-b090-7991ee99fd62 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.712262] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b4ab8fc7-785d-48ab-ada3-15723132742f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "refresh_cache-57537508-06e7-43a4-95c5-c4399b8bf93f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2138.712377] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b4ab8fc7-785d-48ab-ada3-15723132742f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired lock "refresh_cache-57537508-06e7-43a4-95c5-c4399b8bf93f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2138.712518] env[62684]: DEBUG nova.network.neutron [None req-b4ab8fc7-785d-48ab-ada3-15723132742f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2138.799662] env[62684]: DEBUG nova.scheduler.client.report [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 130 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2138.799662] env[62684]: DEBUG nova.compute.provider_tree [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 130 to 131 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2138.799662] env[62684]: DEBUG nova.compute.provider_tree [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2138.803982] env[62684]: DEBUG nova.network.neutron [req-1c63aa28-e12a-4aa8-9281-080685aa8521 req-1a3108a6-29ea-4f7f-9eec-45bf8424c5d5 service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Updated VIF entry in instance network info cache for port b5747949-00d7-4815-9080-52285a6a8813. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2138.804306] env[62684]: DEBUG nova.network.neutron [req-1c63aa28-e12a-4aa8-9281-080685aa8521 req-1a3108a6-29ea-4f7f-9eec-45bf8424c5d5 service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Updating instance_info_cache with network_info: [{"id": "b5747949-00d7-4815-9080-52285a6a8813", "address": "fa:16:3e:fd:34:0c", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5747949-00", "ovs_interfaceid": "b5747949-00d7-4815-9080-52285a6a8813", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2138.927129] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053380, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2139.188916] env[62684]: INFO nova.compute.manager [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Took 14.33 seconds to build instance. 
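The placement traffic above (the 409 with code "placement.concurrent_update", the inventory refresh, and the later generation bump from 130 to 131) is the generation-based optimistic concurrency used for resource providers: a writer must echo the provider generation it last read, and a conflict means another writer updated the provider first, so the client re-reads and retries. A hedged sketch of that retry loop against the Placement REST API follows; PLACEMENT_URL and the token header are placeholders, and error handling is reduced to the conflict case.

    # Hedged sketch of the generation-based retry the report client performs
    # above: read the provider generation, PUT the inventory echoing it, and on
    # a 409 "placement.concurrent_update" refresh and try again.
    # PLACEMENT_URL and the token are placeholders, not real endpoints.
    import requests

    PLACEMENT_URL = "http://placement.example:8778"   # assumption
    HEADERS = {
        "X-Auth-Token": "<token>",                     # assumption
        "OpenStack-API-Version": "placement 1.26",
    }


    def set_inventory(rp_uuid, inventories, max_retries=4):
        for _ in range(max_retries):
            # 1. Read the provider's current generation.
            rp = requests.get(f"{PLACEMENT_URL}/resource_providers/{rp_uuid}",
                              headers=HEADERS)
            rp.raise_for_status()
            generation = rp.json()["generation"]

            # 2. PUT the inventory, echoing the generation we just read.
            resp = requests.put(
                f"{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories",
                headers=HEADERS,
                json={"resource_provider_generation": generation,
                      "inventories": inventories})
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()        # body carries the new generation

            # 3. Another writer bumped the generation first (the
            #    "placement.concurrent_update" case in the log): retry.
            errors = resp.json().get("errors", [])
            if not any(e.get("code") == "placement.concurrent_update"
                       for e in errors):
                resp.raise_for_status()
        raise RuntimeError("gave up after repeated generation conflicts")

The inventories argument would carry a dict shaped like the VCPU/MEMORY_MB/DISK_GB payload shown in the records above.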
[ 2139.252785] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "025dfe36-1f14-4bda-84a0-d424364b745b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2139.253341] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "025dfe36-1f14-4bda-84a0-d424364b745b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2139.253341] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "025dfe36-1f14-4bda-84a0-d424364b745b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2139.253552] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "025dfe36-1f14-4bda-84a0-d424364b745b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2139.253552] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "025dfe36-1f14-4bda-84a0-d424364b745b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2139.255802] env[62684]: INFO nova.compute.manager [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Terminating instance [ 2139.258887] env[62684]: DEBUG nova.compute.manager [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2139.259283] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2139.260082] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52c11eb9-9cd2-4b89-b83f-9901fa07b30c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.269220] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2139.269499] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6c0a1273-691b-4997-b324-20e425db2ae7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.275457] env[62684]: DEBUG oslo_vmware.api [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 2139.275457] env[62684]: value = "task-2053381" [ 2139.275457] env[62684]: _type = "Task" [ 2139.275457] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2139.283793] env[62684]: DEBUG oslo_vmware.api [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053381, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2139.308058] env[62684]: DEBUG oslo_concurrency.lockutils [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.334s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2139.308058] env[62684]: DEBUG nova.compute.manager [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2139.310401] env[62684]: DEBUG oslo_concurrency.lockutils [req-1c63aa28-e12a-4aa8-9281-080685aa8521 req-1a3108a6-29ea-4f7f-9eec-45bf8424c5d5 service nova] Releasing lock "refresh_cache-f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2139.310401] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.986s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2139.310401] env[62684]: DEBUG nova.objects.instance [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Lazy-loading 'resources' on Instance uuid 2eab4a07-9b92-436e-b4f8-fa64ae949b56 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2139.429583] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053380, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2139.468334] env[62684]: DEBUG nova.network.neutron [None req-b4ab8fc7-785d-48ab-ada3-15723132742f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Updating instance_info_cache with network_info: [{"id": "0a96e2ce-2335-44e2-940d-26d3afbafa3a", "address": "fa:16:3e:6d:b8:02", "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-120070593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "263c101fcc5e493789b79dfd1ba97cc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a96e2ce-23", "ovs_interfaceid": "0a96e2ce-2335-44e2-940d-26d3afbafa3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2139.691682] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a33f3acf-f709-423f-bfac-de9b307bc02d tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "0a8d7c48-cf90-4baf-a900-38fbd62869a6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.847s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
2139.785778] env[62684]: DEBUG oslo_vmware.api [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053381, 'name': PowerOffVM_Task, 'duration_secs': 0.256345} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2139.786045] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2139.786227] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2139.786487] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-234d1081-8332-443d-a889-daefa8f9c651 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.818119] env[62684]: DEBUG nova.compute.utils [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2139.819700] env[62684]: DEBUG nova.compute.manager [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2139.819887] env[62684]: DEBUG nova.network.neutron [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2139.867693] env[62684]: DEBUG nova.policy [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '96b96927115d49f2a04342784717e58e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '607a0aa1049640d882d7dd490f5f98ea', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2139.928372] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053380, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2139.970987] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b4ab8fc7-785d-48ab-ada3-15723132742f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Releasing lock "refresh_cache-57537508-06e7-43a4-95c5-c4399b8bf93f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2139.975373] env[62684]: DEBUG nova.compute.manager [None req-b4ab8fc7-785d-48ab-ada3-15723132742f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2139.979522] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3272ac6-9de9-4b36-b651-2f041293bef5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.056279] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-994e08b2-ea73-440d-a4d2-0321daaf2677 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.064335] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5cfa8b-cce0-469e-9704-6e55ba86d405 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.095517] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c56ac72-61cf-4c16-a57b-bc0daa85dc1f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.103036] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82cafa2b-38d2-4dcc-90cf-1f7f3616cf15 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.117458] env[62684]: DEBUG nova.compute.provider_tree [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2140.159955] env[62684]: DEBUG nova.network.neutron [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Successfully created port: ca16d302-c6d1-48a0-ac08-8031db433cc7 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2140.324139] env[62684]: DEBUG nova.compute.manager [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2140.429596] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053380, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2140.624991] env[62684]: DEBUG nova.scheduler.client.report [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2140.931135] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053380, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2141.002281] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1649977-1fef-4b83-8296-d3f21b6bf378 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.009521] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b4ab8fc7-785d-48ab-ada3-15723132742f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Doing hard reboot of VM {{(pid=62684) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 2141.009894] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-41672e55-3fc7-4aa0-a2f2-a6a42e98c87c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.016184] env[62684]: DEBUG oslo_vmware.api [None req-b4ab8fc7-785d-48ab-ada3-15723132742f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2141.016184] env[62684]: value = "task-2053383" [ 2141.016184] env[62684]: _type = "Task" [ 2141.016184] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2141.024978] env[62684]: DEBUG oslo_vmware.api [None req-b4ab8fc7-785d-48ab-ada3-15723132742f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053383, 'name': ResetVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2141.130538] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.820s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2141.132990] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.038s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2141.134588] env[62684]: INFO nova.compute.claims [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2141.160760] env[62684]: INFO nova.scheduler.client.report [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Deleted allocations for instance 2eab4a07-9b92-436e-b4f8-fa64ae949b56 [ 2141.192683] env[62684]: DEBUG nova.compute.manager [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Stashing vm_state: active {{(pid=62684) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 2141.333908] env[62684]: DEBUG nova.compute.manager [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2141.363018] env[62684]: DEBUG nova.virt.hardware [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2141.363018] env[62684]: DEBUG nova.virt.hardware [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2141.363018] env[62684]: DEBUG nova.virt.hardware [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2141.363018] env[62684]: DEBUG nova.virt.hardware [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2141.363018] env[62684]: DEBUG nova.virt.hardware [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2141.363339] env[62684]: DEBUG nova.virt.hardware [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2141.363339] env[62684]: DEBUG nova.virt.hardware [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2141.363454] env[62684]: DEBUG nova.virt.hardware [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2141.363652] env[62684]: DEBUG nova.virt.hardware [None 
req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2141.363829] env[62684]: DEBUG nova.virt.hardware [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2141.364015] env[62684]: DEBUG nova.virt.hardware [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2141.364906] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a67d8ca-d819-4fcb-a5f7-2db1aa377437 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.373412] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf2f8c1-5c66-4214-a738-45fb5a77080b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.414722] env[62684]: DEBUG oslo_concurrency.lockutils [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "daf1486b-d5c2-4341-8a27-36eeeb08cd26" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2141.414997] env[62684]: DEBUG oslo_concurrency.lockutils [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "daf1486b-d5c2-4341-8a27-36eeeb08cd26" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2141.415219] env[62684]: INFO nova.compute.manager [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Shelving [ 2141.430503] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053380, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2141.527917] env[62684]: DEBUG oslo_vmware.api [None req-b4ab8fc7-785d-48ab-ada3-15723132742f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053383, 'name': ResetVM_Task, 'duration_secs': 0.101269} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2141.527917] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b4ab8fc7-785d-48ab-ada3-15723132742f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Did hard reboot of VM {{(pid=62684) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 2141.528039] env[62684]: DEBUG nova.compute.manager [None req-b4ab8fc7-785d-48ab-ada3-15723132742f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2141.528829] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b24c85-6c9f-41a0-b7c5-529f956dd5ff {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.667883] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1b599c44-de5b-46f9-b7c2-0cb29f9e33ac tempest-ServerMetadataTestJSON-1758398841 tempest-ServerMetadataTestJSON-1758398841-project-member] Lock "2eab4a07-9b92-436e-b4f8-fa64ae949b56" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.866s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2141.709230] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2141.923039] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2141.923398] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73a1f4e4-b6da-4f29-a3eb-ca4b71e0be34 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.934103] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053380, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2141.935453] env[62684]: DEBUG oslo_vmware.api [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2141.935453] env[62684]: value = "task-2053384" [ 2141.935453] env[62684]: _type = "Task" [ 2141.935453] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2141.943764] env[62684]: DEBUG oslo_vmware.api [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053384, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2142.041266] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b4ab8fc7-785d-48ab-ada3-15723132742f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "57537508-06e7-43a4-95c5-c4399b8bf93f" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 3.845s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2142.364132] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3beac2-668d-4bdb-995b-9db78fda035b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.372587] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05959019-6c62-4410-9e4d-4a4748b099a8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.404129] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef12f960-bfd9-4f07-be66-91263e5d9d4e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.410755] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-351dc45c-4129-4fae-8fc4-e8cdb5b9cadf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.425435] env[62684]: DEBUG nova.compute.provider_tree [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2142.435534] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053380, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2142.445204] env[62684]: DEBUG oslo_vmware.api [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053384, 'name': PowerOffVM_Task, 'duration_secs': 0.287693} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2142.445532] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2142.446369] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d432723f-f4ed-42ff-aeeb-32165f99f3f3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.467313] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf10fe1a-642e-4fe1-843a-f9e421470587 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.930216] env[62684]: DEBUG nova.scheduler.client.report [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2142.939529] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053380, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2142.979813] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Creating Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2142.980190] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-dc20f1fd-dc7c-4a3d-b145-8c41c5872fd2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.990421] env[62684]: DEBUG oslo_vmware.api [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2142.990421] env[62684]: value = "task-2053385" [ 2142.990421] env[62684]: _type = "Task" [ 2142.990421] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2142.999635] env[62684]: DEBUG oslo_vmware.api [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053385, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2143.437364] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.304s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2143.437901] env[62684]: DEBUG nova.compute.manager [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2143.440513] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053380, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2143.441951] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.732s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2143.454899] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2143.454899] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2143.454899] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Deleting the datastore file [datastore2] e8c90faa-2c25-4308-9781-80d308b9211c {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2143.454899] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-043d61ff-a87c-4337-bf66-7569cd0bb8f3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.464270] env[62684]: DEBUG oslo_vmware.api [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2143.464270] env[62684]: value = "task-2053386" [ 2143.464270] env[62684]: _type = "Task" [ 2143.464270] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2143.469886] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2143.470351] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2143.470716] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Deleting the datastore file [datastore2] 025dfe36-1f14-4bda-84a0-d424364b745b {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2143.471467] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-106a0bea-63ac-4750-b35b-74c582add774 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.476880] env[62684]: DEBUG oslo_vmware.api [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053386, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2143.483018] env[62684]: DEBUG oslo_vmware.api [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 2143.483018] env[62684]: value = "task-2053387" [ 2143.483018] env[62684]: _type = "Task" [ 2143.483018] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2143.489978] env[62684]: DEBUG oslo_vmware.api [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053387, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2143.499056] env[62684]: DEBUG oslo_vmware.api [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053385, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2143.937077] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053380, 'name': CreateVM_Task, 'duration_secs': 5.591912} completed successfully. 
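The task records above (PowerOffVM_Task, DeleteDatastoreFile_Task, CreateVM_Task) all follow the same oslo.vmware pattern behind the repeated _poll_task lines: a vCenter call returns a Task managed object and the session polls it until it reports success or failure. A minimal sketch of that pattern, assuming hypothetical vCenter credentials and an already looked-up VM reference vm_ref; the parameter values here are illustrative, not the driver's actual settings:

    # Sketch only: host/user/password and vm_ref are placeholders supplied by
    # the caller; oslo.vmware must be installed.
    from oslo_vmware import api as vmware_api

    def power_off(host, user, password, vm_ref):
        session = vmware_api.VMwareAPISession(
            host, user, password,
            api_retry_count=3, task_poll_interval=0.5)
        try:
            # Issues the SOAP call ("Invoking VirtualMachine.PowerOffVM_Task")
            # and gets back a Task reference.
            task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
            # Polls the task (the "_poll_task ... progress is N%" lines) and
            # raises if vCenter reports an error.
            session.wait_for_task(task)
        finally:
            session.logout()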
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2143.937249] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2143.937946] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2143.938168] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2143.938497] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2143.938760] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2606bfad-11cf-453f-ba65-debff937b6ed {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.943467] env[62684]: DEBUG nova.compute.utils [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2143.945370] env[62684]: DEBUG nova.compute.manager [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2143.945535] env[62684]: DEBUG nova.network.neutron [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2143.949160] env[62684]: INFO nova.compute.claims [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2143.954455] env[62684]: DEBUG oslo_vmware.api [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2143.954455] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52cf46d6-0695-eef0-ee69-45fea253ea96" [ 2143.954455] env[62684]: _type = "Task" [ 2143.954455] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2143.955435] env[62684]: DEBUG nova.compute.manager [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2143.966929] env[62684]: DEBUG oslo_vmware.api [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52cf46d6-0695-eef0-ee69-45fea253ea96, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2143.977031] env[62684]: DEBUG oslo_vmware.api [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053386, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170359} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2143.977031] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2143.977031] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2143.977031] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2143.977031] env[62684]: INFO nova.compute.manager [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Took 8.21 seconds to destroy the instance on the hypervisor. [ 2143.977031] env[62684]: DEBUG oslo.service.loopingcall [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2143.977318] env[62684]: DEBUG nova.compute.manager [-] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2143.977403] env[62684]: DEBUG nova.network.neutron [-] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2143.990148] env[62684]: DEBUG oslo_vmware.api [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053387, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.343757} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2143.990951] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2143.991163] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2143.991353] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2143.991527] env[62684]: INFO nova.compute.manager [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Took 4.73 seconds to destroy the instance on the hypervisor. [ 2143.991762] env[62684]: DEBUG oslo.service.loopingcall [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2143.992031] env[62684]: DEBUG nova.compute.manager [-] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2143.992132] env[62684]: DEBUG nova.network.neutron [-] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2144.002487] env[62684]: DEBUG oslo_vmware.api [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053385, 'name': CreateSnapshot_Task, 'duration_secs': 0.599417} completed successfully. 
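The "Waiting for function ... _deallocate_network_with_retries to return" records come from oslo.service's looping-call helper: a callable is re-invoked on an interval until it raises LoopingCallDone, and the caller blocks on the returned event. Nova wraps network deallocation in a retrying variant of this; a minimal sketch of the underlying mechanism, with a hypothetical deallocate() standing in for the real retry logic:

    # Sketch only: deallocate() is a stand-in for Nova's
    # _deallocate_network_with_retries closure.
    from oslo_service import loopingcall

    def deallocate():
        # Do one attempt; on success stop the loop and hand back a value.
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(deallocate)
    # start() returns an event; wait() blocks until LoopingCallDone is raised,
    # which is the point where the "Waiting for function ..." wait ends.
    result = timer.start(interval=1.0).wait()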
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2144.002731] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Created Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2144.003510] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73edce1b-aeaa-4753-b68d-a70fb52076cf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.022749] env[62684]: DEBUG nova.policy [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0f3a4c8387a64e32947880017d2abeb6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43304d5c52344bd9841dbc760a174b4f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2144.425094] env[62684]: DEBUG nova.compute.manager [req-58cf3560-70d7-4f48-8446-0175c1642936 req-ab8a6e8a-222e-4034-bb6e-e004b3fad96b service nova] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Received event network-vif-deleted-f0464f82-ef8e-43bd-a863-de4b524e43c8 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2144.425335] env[62684]: INFO nova.compute.manager [req-58cf3560-70d7-4f48-8446-0175c1642936 req-ab8a6e8a-222e-4034-bb6e-e004b3fad96b service nova] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Neutron deleted interface f0464f82-ef8e-43bd-a863-de4b524e43c8; detaching it from the instance and deleting it from the info cache [ 2144.425533] env[62684]: DEBUG nova.network.neutron [req-58cf3560-70d7-4f48-8446-0175c1642936 req-ab8a6e8a-222e-4034-bb6e-e004b3fad96b service nova] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2144.459868] env[62684]: INFO nova.compute.resource_tracker [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Updating resource usage from migration 951ca425-53d8-45a1-867b-812651c44c78 [ 2144.483201] env[62684]: DEBUG oslo_vmware.api [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52cf46d6-0695-eef0-ee69-45fea253ea96, 'name': SearchDatastore_Task, 'duration_secs': 0.023328} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2144.483488] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2144.483777] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2144.484037] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2144.484198] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2144.484387] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2144.484843] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b0f142fd-cbd9-4117-9698-c9710f0f86e5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.501657] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2144.501911] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Folder [datastore1] devstack-image-cache_base created. 
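The Acquiring/Acquired/Releasing lock records for the "[datastore1] devstack-image-cache_base/<image id>" paths show oslo.concurrency serializing work on the shared image cache, so only one request fetches or processes a given cached image at a time. A minimal sketch of that locking pattern, with a hypothetical fetch_image() doing the guarded work:

    # Sketch only: fetch_image() is a placeholder for the guarded work,
    # e.g. downloading the image into the datastore cache.
    from oslo_concurrency import lockutils

    def fetch_image(image_id):
        pass  # placeholder

    def ensure_cached(image_id):
        # One lock per cached image path, mirroring the lock names in the log,
        # so concurrent spawns of the same image serialize here.
        with lockutils.lock('[datastore1] devstack-image-cache_base/%s' % image_id):
            fetch_image(image_id)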
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2144.503672] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55b22747-11f0-446f-a562-fc4ed2d7ea57 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.510270] env[62684]: DEBUG nova.network.neutron [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Successfully created port: 9755599a-bf6c-415f-b6dc-88d5d3774944 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2144.520393] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Creating linked-clone VM from snapshot {{(pid=62684) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2144.522139] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e39edfae-609d-4d77-90ec-2e2065892422 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.525182] env[62684]: DEBUG oslo_vmware.api [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2144.525182] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b9650f-94e2-24a7-37a8-8e82a97e9245" [ 2144.525182] env[62684]: _type = "Task" [ 2144.525182] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2144.536919] env[62684]: DEBUG oslo_vmware.api [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2144.536919] env[62684]: value = "task-2053388" [ 2144.536919] env[62684]: _type = "Task" [ 2144.536919] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2144.537766] env[62684]: DEBUG oslo_vmware.api [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b9650f-94e2-24a7-37a8-8e82a97e9245, 'name': SearchDatastore_Task, 'duration_secs': 0.014879} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2144.543116] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3138f15f-d2c2-4e49-ab07-f294daba2b66 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.548727] env[62684]: DEBUG oslo_vmware.api [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2144.548727] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52666c73-f466-ea6d-4c50-0e3099f171ed" [ 2144.548727] env[62684]: _type = "Task" [ 2144.548727] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2144.551810] env[62684]: DEBUG oslo_vmware.api [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053388, 'name': CloneVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2144.564814] env[62684]: DEBUG oslo_vmware.api [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52666c73-f466-ea6d-4c50-0e3099f171ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2144.596331] env[62684]: DEBUG nova.compute.manager [req-ca262eb3-f23c-4a2e-8478-b649fffa2e24 req-fea2136c-d66e-4141-9b96-8af9eeb0ded8 service nova] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Received event network-vif-plugged-866e13c3-fdd0-4c15-97db-5002d50eabeb {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2144.596331] env[62684]: DEBUG oslo_concurrency.lockutils [req-ca262eb3-f23c-4a2e-8478-b649fffa2e24 req-fea2136c-d66e-4141-9b96-8af9eeb0ded8 service nova] Acquiring lock "9418b42d-9fff-41fd-92d1-a832017fc9c3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2144.596331] env[62684]: DEBUG oslo_concurrency.lockutils [req-ca262eb3-f23c-4a2e-8478-b649fffa2e24 req-fea2136c-d66e-4141-9b96-8af9eeb0ded8 service nova] Lock "9418b42d-9fff-41fd-92d1-a832017fc9c3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2144.596331] env[62684]: DEBUG oslo_concurrency.lockutils [req-ca262eb3-f23c-4a2e-8478-b649fffa2e24 req-fea2136c-d66e-4141-9b96-8af9eeb0ded8 service nova] Lock "9418b42d-9fff-41fd-92d1-a832017fc9c3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2144.596331] env[62684]: DEBUG nova.compute.manager [req-ca262eb3-f23c-4a2e-8478-b649fffa2e24 req-fea2136c-d66e-4141-9b96-8af9eeb0ded8 service nova] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] No waiting events found dispatching 
network-vif-plugged-866e13c3-fdd0-4c15-97db-5002d50eabeb {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2144.596331] env[62684]: WARNING nova.compute.manager [req-ca262eb3-f23c-4a2e-8478-b649fffa2e24 req-fea2136c-d66e-4141-9b96-8af9eeb0ded8 service nova] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Received unexpected event network-vif-plugged-866e13c3-fdd0-4c15-97db-5002d50eabeb for instance with vm_state building and task_state spawning. [ 2144.733571] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77cf1c0c-88a4-461c-9512-c3e7bd007495 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.742117] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e332e407-374d-463a-9806-4e43de8d1449 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.773386] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5834d1dd-cc89-4ac5-afbf-6b431a58c960 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.781784] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84c3c24-13ff-4710-aa3a-6b8a1f4d8eed {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.795681] env[62684]: DEBUG nova.compute.provider_tree [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2144.813677] env[62684]: DEBUG nova.network.neutron [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Successfully updated port: 866e13c3-fdd0-4c15-97db-5002d50eabeb {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2144.829300] env[62684]: DEBUG nova.network.neutron [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Successfully updated port: ca16d302-c6d1-48a0-ac08-8031db433cc7 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2144.907875] env[62684]: DEBUG nova.network.neutron [-] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2144.928224] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-586a422d-dd77-444f-b638-d14f0ec554d4 {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.939127] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f2d10c-a90e-48e2-9486-dc354d7220b5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.972023] env[62684]: DEBUG nova.compute.manager [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2144.974213] env[62684]: DEBUG nova.compute.manager [req-58cf3560-70d7-4f48-8446-0175c1642936 req-ab8a6e8a-222e-4034-bb6e-e004b3fad96b service nova] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Detach interface failed, port_id=f0464f82-ef8e-43bd-a863-de4b524e43c8, reason: Instance e8c90faa-2c25-4308-9781-80d308b9211c could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2144.998594] env[62684]: DEBUG nova.virt.hardware [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2144.998849] env[62684]: DEBUG nova.virt.hardware [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2144.999017] env[62684]: DEBUG nova.virt.hardware [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2144.999297] env[62684]: DEBUG nova.virt.hardware [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2144.999752] env[62684]: DEBUG nova.virt.hardware [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2144.999935] env[62684]: DEBUG nova.virt.hardware [None req-71379fa2-a79a-4159-9449-2920b71b4365 
tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2145.000395] env[62684]: DEBUG nova.virt.hardware [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2145.000586] env[62684]: DEBUG nova.virt.hardware [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2145.000770] env[62684]: DEBUG nova.virt.hardware [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2145.000942] env[62684]: DEBUG nova.virt.hardware [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2145.001136] env[62684]: DEBUG nova.virt.hardware [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2145.002078] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-989f3ec7-2897-4260-ae15-5a94db31563f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.010872] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01fc89ef-e948-4f7f-949e-3595484d496c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.045726] env[62684]: DEBUG oslo_vmware.api [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053388, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2145.060827] env[62684]: DEBUG oslo_vmware.api [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52666c73-f466-ea6d-4c50-0e3099f171ed, 'name': SearchDatastore_Task, 'duration_secs': 0.019833} completed successfully. 
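The nova.virt.hardware records above walk through CPU topology selection for the m1.nano flavor: with no flavor or image limits (0:0:0) the preferred topology is unconstrained, and for 1 vCPU the only candidate is cores=1,sockets=1,threads=1. A small illustrative helper (not Nova's implementation) that enumerates candidate topologies the same way, as sockets*cores*threads factorizations of the vCPU count under the default 65536 limits seen in the log:

    # Illustrative only; Nova's real logic lives in nova/virt/hardware.py.
    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Yield (sockets, cores, threads) triples whose product equals vcpus."""
        for sockets, cores, threads in itertools.product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                yield sockets, cores, threads

    # For the 1-vCPU flavor in the log this yields exactly [(1, 1, 1)].
    print(list(possible_topologies(1)))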
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2145.061163] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2145.061462] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] f037d6b2-2082-4611-985e-b9a077eb8250/f037d6b2-2082-4611-985e-b9a077eb8250.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2145.061692] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-001e2760-c12a-4a1a-aab1-3183b4a319b5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.067687] env[62684]: DEBUG oslo_vmware.api [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2145.067687] env[62684]: value = "task-2053389" [ 2145.067687] env[62684]: _type = "Task" [ 2145.067687] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2145.075648] env[62684]: DEBUG oslo_vmware.api [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053389, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2145.189441] env[62684]: DEBUG nova.network.neutron [-] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2145.317443] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "refresh_cache-9418b42d-9fff-41fd-92d1-a832017fc9c3" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2145.317443] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquired lock "refresh_cache-9418b42d-9fff-41fd-92d1-a832017fc9c3" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2145.317443] env[62684]: DEBUG nova.network.neutron [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2145.319099] env[62684]: ERROR nova.scheduler.client.report [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [req-0c829693-7f6a-4c22-a891-f87aa875231d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0c829693-7f6a-4c22-a891-f87aa875231d"}]} [ 2145.331158] env[62684]: DEBUG oslo_concurrency.lockutils [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "refresh_cache-aebbc2cc-8973-4907-9ec8-085027fd7ca3" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2145.331433] env[62684]: DEBUG oslo_concurrency.lockutils [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquired lock "refresh_cache-aebbc2cc-8973-4907-9ec8-085027fd7ca3" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2145.331433] env[62684]: DEBUG nova.network.neutron [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2145.339909] env[62684]: DEBUG nova.scheduler.client.report [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2145.357887] env[62684]: DEBUG nova.scheduler.client.report [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2145.358222] env[62684]: DEBUG nova.compute.provider_tree [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2145.371084] env[62684]: DEBUG nova.scheduler.client.report [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Refreshing aggregate associations 
for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2145.392033] env[62684]: DEBUG nova.scheduler.client.report [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2145.413027] env[62684]: INFO nova.compute.manager [-] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Took 1.43 seconds to deallocate network for instance. [ 2145.550309] env[62684]: DEBUG oslo_vmware.api [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053388, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2145.582100] env[62684]: DEBUG oslo_vmware.api [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053389, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.508195} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2145.582635] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] f037d6b2-2082-4611-985e-b9a077eb8250/f037d6b2-2082-4611-985e-b9a077eb8250.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2145.583040] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2145.583467] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f1d64d2a-585c-49d8-9b80-2c81c737d08d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.591673] env[62684]: DEBUG oslo_vmware.api [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2145.591673] env[62684]: value = "task-2053390" [ 2145.591673] env[62684]: _type = "Task" [ 2145.591673] env[62684]: } to complete. 
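The 409 from Placement above ("resource provider generation conflict", code placement.concurrent_update) is the expected optimistic-concurrency signal: every inventory update carries the provider generation the writer last saw, and if another writer bumped it first the PUT is rejected, so the report client refreshes and retries (which is why the inventory is re-read and the generation later moves from 133 to 134). A minimal sketch of that retry loop against the Placement HTTP API, assuming a pre-authenticated requests-style session object sess and a placement endpoint URL, both hypothetical here:

    # Sketch only: `sess` is assumed to behave like a requests.Session that
    # already carries an auth token and a placement microversion header.
    def put_inventories(sess, placement_url, rp_uuid, inventories, retries=3):
        url = '%s/resource_providers/%s/inventories' % (placement_url, rp_uuid)
        for _ in range(retries):
            # Read the provider's current generation alongside its inventory.
            current = sess.get(url).json()
            payload = {
                'resource_provider_generation':
                    current['resource_provider_generation'],
                'inventories': inventories,
            }
            resp = sess.put(url, json=payload)
            if resp.status_code == 200:
                return resp.json()
            if resp.status_code != 409:
                resp.raise_for_status()
            # 409 placement.concurrent_update: another writer updated the
            # provider first; loop to refresh the generation and retry.
        raise RuntimeError('gave up after repeated generation conflicts')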
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2145.605390] env[62684]: DEBUG oslo_vmware.api [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053390, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2145.649901] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-854b5e77-5d85-405f-a76f-f1055f6472a5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.658258] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df2fe73-1ce4-4d6c-b54d-ed4d3f5a151f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.689213] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c040e0-25da-47d7-afba-5e3abf517f20 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.691932] env[62684]: INFO nova.compute.manager [-] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Took 1.70 seconds to deallocate network for instance. [ 2145.699437] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1452a4ba-c90f-40a9-a0f3-53ff67bac853 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.713370] env[62684]: DEBUG nova.compute.provider_tree [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2145.867650] env[62684]: DEBUG nova.network.neutron [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2145.871420] env[62684]: DEBUG nova.network.neutron [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2145.925493] env[62684]: DEBUG oslo_concurrency.lockutils [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2146.029328] env[62684]: DEBUG nova.network.neutron [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Updating instance_info_cache with network_info: [{"id": "866e13c3-fdd0-4c15-97db-5002d50eabeb", "address": "fa:16:3e:7b:e9:ee", "network": {"id": "aa52badb-0b73-48bc-afaa-5e06a97d5c7d", "bridge": "br-int", "label": "tempest-ServersTestJSON-556342067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c54f74085f343d2b790145b0d82a9f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap866e13c3-fd", "ovs_interfaceid": "866e13c3-fdd0-4c15-97db-5002d50eabeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2146.049757] env[62684]: DEBUG oslo_vmware.api [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053388, 'name': CloneVM_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2146.104686] env[62684]: DEBUG oslo_vmware.api [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053390, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075794} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2146.104686] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2146.104686] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d19321-eba2-4753-9ce3-f538c2d06953 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.129064] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] f037d6b2-2082-4611-985e-b9a077eb8250/f037d6b2-2082-4611-985e-b9a077eb8250.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2146.132373] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e681e1fb-4aa6-4670-a772-6533be9dc4ea {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.155713] env[62684]: DEBUG oslo_vmware.api [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2146.155713] env[62684]: value = "task-2053391" [ 2146.155713] env[62684]: _type = "Task" [ 2146.155713] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2146.164637] env[62684]: DEBUG oslo_vmware.api [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053391, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2146.200234] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2146.251443] env[62684]: DEBUG nova.scheduler.client.report [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 133 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2146.251443] env[62684]: DEBUG nova.compute.provider_tree [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 133 to 134 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2146.251443] env[62684]: DEBUG nova.compute.provider_tree [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2146.260272] env[62684]: DEBUG nova.network.neutron [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Updating instance_info_cache with network_info: [{"id": "ca16d302-c6d1-48a0-ac08-8031db433cc7", "address": "fa:16:3e:02:7e:42", "network": {"id": "b24dd0c0-a394-4ca6-a79a-94535bc1df6f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2023102141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "607a0aa1049640d882d7dd490f5f98ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca16d302-c6", "ovs_interfaceid": "ca16d302-c6d1-48a0-ac08-8031db433cc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2146.536272] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Releasing lock "refresh_cache-9418b42d-9fff-41fd-92d1-a832017fc9c3" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2146.536272] env[62684]: DEBUG nova.compute.manager [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Instance network_info: |[{"id": "866e13c3-fdd0-4c15-97db-5002d50eabeb", "address": "fa:16:3e:7b:e9:ee", "network": {"id": "aa52badb-0b73-48bc-afaa-5e06a97d5c7d", "bridge": "br-int", "label": "tempest-ServersTestJSON-556342067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c54f74085f343d2b790145b0d82a9f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap866e13c3-fd", "ovs_interfaceid": "866e13c3-fdd0-4c15-97db-5002d50eabeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2146.536272] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:e9:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1fb81f98-6f5a-47ab-a512-27277591d064', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '866e13c3-fdd0-4c15-97db-5002d50eabeb', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2146.542728] env[62684]: DEBUG oslo.service.loopingcall [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2146.544520] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2146.546199] env[62684]: DEBUG nova.compute.manager [req-89788043-1f6f-4430-b9de-4ab65440b54a req-d38fa7fa-8e99-4959-867a-b2512f69c2c7 service nova] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Received event network-vif-deleted-1d1c0f31-e026-45f0-b3c8-5ba02555e863 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2146.546696] env[62684]: DEBUG nova.compute.manager [req-89788043-1f6f-4430-b9de-4ab65440b54a req-d38fa7fa-8e99-4959-867a-b2512f69c2c7 service nova] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Received event network-vif-plugged-ca16d302-c6d1-48a0-ac08-8031db433cc7 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2146.547150] env[62684]: DEBUG oslo_concurrency.lockutils [req-89788043-1f6f-4430-b9de-4ab65440b54a req-d38fa7fa-8e99-4959-867a-b2512f69c2c7 service nova] Acquiring lock "aebbc2cc-8973-4907-9ec8-085027fd7ca3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2146.547934] env[62684]: DEBUG oslo_concurrency.lockutils [req-89788043-1f6f-4430-b9de-4ab65440b54a req-d38fa7fa-8e99-4959-867a-b2512f69c2c7 service nova] Lock "aebbc2cc-8973-4907-9ec8-085027fd7ca3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2146.548279] env[62684]: DEBUG oslo_concurrency.lockutils [req-89788043-1f6f-4430-b9de-4ab65440b54a req-d38fa7fa-8e99-4959-867a-b2512f69c2c7 service nova] Lock "aebbc2cc-8973-4907-9ec8-085027fd7ca3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2146.548989] env[62684]: DEBUG nova.compute.manager [req-89788043-1f6f-4430-b9de-4ab65440b54a req-d38fa7fa-8e99-4959-867a-b2512f69c2c7 service nova] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] No waiting events found dispatching network-vif-plugged-ca16d302-c6d1-48a0-ac08-8031db433cc7 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2146.549343] env[62684]: WARNING nova.compute.manager [req-89788043-1f6f-4430-b9de-4ab65440b54a req-d38fa7fa-8e99-4959-867a-b2512f69c2c7 service nova] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Received unexpected event network-vif-plugged-ca16d302-c6d1-48a0-ac08-8031db433cc7 for instance with vm_state building and task_state spawning. 
[ 2146.549659] env[62684]: DEBUG nova.compute.manager [req-89788043-1f6f-4430-b9de-4ab65440b54a req-d38fa7fa-8e99-4959-867a-b2512f69c2c7 service nova] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Received event network-changed-ca16d302-c6d1-48a0-ac08-8031db433cc7 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2146.549944] env[62684]: DEBUG nova.compute.manager [req-89788043-1f6f-4430-b9de-4ab65440b54a req-d38fa7fa-8e99-4959-867a-b2512f69c2c7 service nova] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Refreshing instance network info cache due to event network-changed-ca16d302-c6d1-48a0-ac08-8031db433cc7. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2146.550272] env[62684]: DEBUG oslo_concurrency.lockutils [req-89788043-1f6f-4430-b9de-4ab65440b54a req-d38fa7fa-8e99-4959-867a-b2512f69c2c7 service nova] Acquiring lock "refresh_cache-aebbc2cc-8973-4907-9ec8-085027fd7ca3" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2146.553674] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c8f2785-12b2-4036-8608-f980cf51ff2f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.574681] env[62684]: DEBUG oslo_vmware.api [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053388, 'name': CloneVM_Task, 'duration_secs': 1.55973} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2146.576360] env[62684]: INFO nova.virt.vmwareapi.vmops [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Created linked-clone VM from snapshot [ 2146.578246] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2146.578246] env[62684]: value = "task-2053392" [ 2146.578246] env[62684]: _type = "Task" [ 2146.578246] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2146.578246] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d84b813e-57d9-4e58-8148-99a554ef4502 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.590186] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Uploading image b5ede0c6-ad0d-4c75-b005-a332dfdc71df {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2146.596903] env[62684]: DEBUG nova.network.neutron [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Successfully updated port: 9755599a-bf6c-415f-b6dc-88d5d3774944 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2146.597126] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053392, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2146.620264] env[62684]: DEBUG nova.compute.manager [req-56bb2d74-0b48-41d0-bc93-79bc69cfc3a5 req-e0e18376-5f00-4046-beb2-a47528189595 service nova] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Received event network-changed-866e13c3-fdd0-4c15-97db-5002d50eabeb {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2146.620486] env[62684]: DEBUG nova.compute.manager [req-56bb2d74-0b48-41d0-bc93-79bc69cfc3a5 req-e0e18376-5f00-4046-beb2-a47528189595 service nova] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Refreshing instance network info cache due to event network-changed-866e13c3-fdd0-4c15-97db-5002d50eabeb. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2146.620815] env[62684]: DEBUG oslo_concurrency.lockutils [req-56bb2d74-0b48-41d0-bc93-79bc69cfc3a5 req-e0e18376-5f00-4046-beb2-a47528189595 service nova] Acquiring lock "refresh_cache-9418b42d-9fff-41fd-92d1-a832017fc9c3" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2146.620998] env[62684]: DEBUG oslo_concurrency.lockutils [req-56bb2d74-0b48-41d0-bc93-79bc69cfc3a5 req-e0e18376-5f00-4046-beb2-a47528189595 service nova] Acquired lock "refresh_cache-9418b42d-9fff-41fd-92d1-a832017fc9c3" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2146.621171] env[62684]: DEBUG nova.network.neutron [req-56bb2d74-0b48-41d0-bc93-79bc69cfc3a5 req-e0e18376-5f00-4046-beb2-a47528189595 service nova] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Refreshing network info cache for port 866e13c3-fdd0-4c15-97db-5002d50eabeb {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2146.624419] env[62684]: DEBUG oslo_vmware.rw_handles [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2146.624419] env[62684]: value = "vm-421362" [ 2146.624419] env[62684]: _type = "VirtualMachine" [ 2146.624419] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2146.625309] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-031efc84-fd8f-4789-890d-9baf30b44b7d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.632422] env[62684]: DEBUG oslo_vmware.rw_handles [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lease: (returnval){ [ 2146.632422] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52fb76eb-53bd-3579-2a14-9586ce76a4b4" [ 2146.632422] env[62684]: _type = "HttpNfcLease" [ 2146.632422] env[62684]: } obtained for exporting VM: (result){ [ 2146.632422] env[62684]: value = "vm-421362" [ 2146.632422] env[62684]: _type = "VirtualMachine" [ 2146.632422] env[62684]: }. 
{{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2146.632690] env[62684]: DEBUG oslo_vmware.api [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the lease: (returnval){ [ 2146.632690] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52fb76eb-53bd-3579-2a14-9586ce76a4b4" [ 2146.632690] env[62684]: _type = "HttpNfcLease" [ 2146.632690] env[62684]: } to be ready. {{(pid=62684) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2146.639661] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2146.639661] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52fb76eb-53bd-3579-2a14-9586ce76a4b4" [ 2146.639661] env[62684]: _type = "HttpNfcLease" [ 2146.639661] env[62684]: } is initializing. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2146.665913] env[62684]: DEBUG oslo_vmware.api [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053391, 'name': ReconfigVM_Task, 'duration_secs': 0.270488} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2146.666288] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Reconfigured VM instance instance-00000058 to attach disk [datastore1] f037d6b2-2082-4611-985e-b9a077eb8250/f037d6b2-2082-4611-985e-b9a077eb8250.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2146.666989] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-064868ea-54c2-4018-bdc3-b4836dad1992 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.673845] env[62684]: DEBUG oslo_vmware.api [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2146.673845] env[62684]: value = "task-2053394" [ 2146.673845] env[62684]: _type = "Task" [ 2146.673845] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2146.682159] env[62684]: DEBUG oslo_vmware.api [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053394, 'name': Rename_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2146.759231] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 3.318s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2146.759414] env[62684]: INFO nova.compute.manager [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Migrating [ 2146.765919] env[62684]: DEBUG oslo_concurrency.lockutils [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.841s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2146.766307] env[62684]: DEBUG nova.objects.instance [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lazy-loading 'resources' on Instance uuid e8c90faa-2c25-4308-9781-80d308b9211c {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2146.767809] env[62684]: DEBUG oslo_concurrency.lockutils [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Releasing lock "refresh_cache-aebbc2cc-8973-4907-9ec8-085027fd7ca3" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2146.768089] env[62684]: DEBUG nova.compute.manager [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Instance network_info: |[{"id": "ca16d302-c6d1-48a0-ac08-8031db433cc7", "address": "fa:16:3e:02:7e:42", "network": {"id": "b24dd0c0-a394-4ca6-a79a-94535bc1df6f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2023102141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "607a0aa1049640d882d7dd490f5f98ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca16d302-c6", "ovs_interfaceid": "ca16d302-c6d1-48a0-ac08-8031db433cc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2146.770982] env[62684]: 
DEBUG oslo_concurrency.lockutils [req-89788043-1f6f-4430-b9de-4ab65440b54a req-d38fa7fa-8e99-4959-867a-b2512f69c2c7 service nova] Acquired lock "refresh_cache-aebbc2cc-8973-4907-9ec8-085027fd7ca3" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2146.771225] env[62684]: DEBUG nova.network.neutron [req-89788043-1f6f-4430-b9de-4ab65440b54a req-d38fa7fa-8e99-4959-867a-b2512f69c2c7 service nova] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Refreshing network info cache for port ca16d302-c6d1-48a0-ac08-8031db433cc7 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2146.772273] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:7e:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ca16d302-c6d1-48a0-ac08-8031db433cc7', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2146.780818] env[62684]: DEBUG oslo.service.loopingcall [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2146.788921] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2146.789962] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ca420e3-e034-4e29-9290-9b3269690b1b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.813371] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2146.813371] env[62684]: value = "task-2053395" [ 2146.813371] env[62684]: _type = "Task" [ 2146.813371] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2146.821750] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053395, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2147.042798] env[62684]: DEBUG nova.network.neutron [req-89788043-1f6f-4430-b9de-4ab65440b54a req-d38fa7fa-8e99-4959-867a-b2512f69c2c7 service nova] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Updated VIF entry in instance network info cache for port ca16d302-c6d1-48a0-ac08-8031db433cc7. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2147.043193] env[62684]: DEBUG nova.network.neutron [req-89788043-1f6f-4430-b9de-4ab65440b54a req-d38fa7fa-8e99-4959-867a-b2512f69c2c7 service nova] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Updating instance_info_cache with network_info: [{"id": "ca16d302-c6d1-48a0-ac08-8031db433cc7", "address": "fa:16:3e:02:7e:42", "network": {"id": "b24dd0c0-a394-4ca6-a79a-94535bc1df6f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2023102141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "607a0aa1049640d882d7dd490f5f98ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca16d302-c6", "ovs_interfaceid": "ca16d302-c6d1-48a0-ac08-8031db433cc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2147.090660] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053392, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2147.098394] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquiring lock "refresh_cache-7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2147.098648] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquired lock "refresh_cache-7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2147.098834] env[62684]: DEBUG nova.network.neutron [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2147.141656] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2147.141656] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52fb76eb-53bd-3579-2a14-9586ce76a4b4" [ 2147.141656] env[62684]: _type = "HttpNfcLease" [ 2147.141656] env[62684]: } is ready. 
{{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2147.141990] env[62684]: DEBUG oslo_vmware.rw_handles [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2147.141990] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52fb76eb-53bd-3579-2a14-9586ce76a4b4" [ 2147.141990] env[62684]: _type = "HttpNfcLease" [ 2147.141990] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2147.142683] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a60d6adc-3ed9-4c7e-afec-1be521f82ee1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.151252] env[62684]: DEBUG oslo_vmware.rw_handles [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520a84c4-3c81-344d-cd95-11dfad162211/disk-0.vmdk from lease info. {{(pid=62684) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2147.151404] env[62684]: DEBUG oslo_vmware.rw_handles [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520a84c4-3c81-344d-cd95-11dfad162211/disk-0.vmdk for reading. {{(pid=62684) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2147.219233] env[62684]: DEBUG oslo_vmware.api [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053394, 'name': Rename_Task, 'duration_secs': 0.137744} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2147.219548] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2147.219842] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-90b47dc3-52c9-4a5c-b664-5dabe6c37e3d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.226553] env[62684]: DEBUG oslo_vmware.api [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2147.226553] env[62684]: value = "task-2053396" [ 2147.226553] env[62684]: _type = "Task" [ 2147.226553] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2147.234255] env[62684]: DEBUG oslo_vmware.api [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053396, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2147.254810] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4c6d4d7f-00f6-4476-80d1-38713524cd7f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.290861] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "refresh_cache-0a8d7c48-cf90-4baf-a900-38fbd62869a6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2147.291124] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired lock "refresh_cache-0a8d7c48-cf90-4baf-a900-38fbd62869a6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2147.291280] env[62684]: DEBUG nova.network.neutron [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2147.325891] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053395, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2147.513288] env[62684]: DEBUG nova.network.neutron [req-56bb2d74-0b48-41d0-bc93-79bc69cfc3a5 req-e0e18376-5f00-4046-beb2-a47528189595 service nova] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Updated VIF entry in instance network info cache for port 866e13c3-fdd0-4c15-97db-5002d50eabeb. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2147.513759] env[62684]: DEBUG nova.network.neutron [req-56bb2d74-0b48-41d0-bc93-79bc69cfc3a5 req-e0e18376-5f00-4046-beb2-a47528189595 service nova] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Updating instance_info_cache with network_info: [{"id": "866e13c3-fdd0-4c15-97db-5002d50eabeb", "address": "fa:16:3e:7b:e9:ee", "network": {"id": "aa52badb-0b73-48bc-afaa-5e06a97d5c7d", "bridge": "br-int", "label": "tempest-ServersTestJSON-556342067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c54f74085f343d2b790145b0d82a9f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap866e13c3-fd", "ovs_interfaceid": "866e13c3-fdd0-4c15-97db-5002d50eabeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2147.532457] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02c229c2-4f78-44ee-9094-eb81f425dd35 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.540933] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf24a932-4a5a-4f5c-9464-60aa64fa0724 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.545618] env[62684]: DEBUG oslo_concurrency.lockutils [req-89788043-1f6f-4430-b9de-4ab65440b54a req-d38fa7fa-8e99-4959-867a-b2512f69c2c7 service nova] Releasing lock "refresh_cache-aebbc2cc-8973-4907-9ec8-085027fd7ca3" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2147.574897] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-352c8dd7-21b8-4928-9a28-b7414d82cec0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.586513] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9529f1d-6462-47da-88f0-fc9124910dbf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.595947] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053392, 'name': CreateVM_Task, 'duration_secs': 0.576418} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2147.603641] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2147.606078] env[62684]: DEBUG nova.compute.provider_tree [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2147.608157] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2147.608450] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2147.609059] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2147.609715] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09b9a4b0-8efe-4d8a-857d-0c82814552b6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.614971] env[62684]: DEBUG oslo_vmware.api [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2147.614971] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529d1959-6384-a8e5-dccb-78287f36bd5e" [ 2147.614971] env[62684]: _type = "Task" [ 2147.614971] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2147.625137] env[62684]: DEBUG oslo_vmware.api [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529d1959-6384-a8e5-dccb-78287f36bd5e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2147.679383] env[62684]: DEBUG nova.network.neutron [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2147.741385] env[62684]: DEBUG oslo_vmware.api [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053396, 'name': PowerOnVM_Task, 'duration_secs': 0.459877} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2147.741786] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2147.742085] env[62684]: INFO nova.compute.manager [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Took 12.09 seconds to spawn the instance on the hypervisor. [ 2147.742333] env[62684]: DEBUG nova.compute.manager [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2147.743300] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7073785a-a464-4298-bfb2-9865c369a13d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.827075] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053395, 'name': CreateVM_Task, 'duration_secs': 0.566502} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2147.827075] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2147.827075] env[62684]: DEBUG oslo_concurrency.lockutils [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2147.896629] env[62684]: DEBUG nova.network.neutron [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Updating instance_info_cache with network_info: [{"id": "9755599a-bf6c-415f-b6dc-88d5d3774944", "address": "fa:16:3e:1d:6c:e1", "network": {"id": "8eebb0b3-51e4-44c0-a4b4-b45647332a9e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-398889609-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "43304d5c52344bd9841dbc760a174b4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9755599a-bf", "ovs_interfaceid": "9755599a-bf6c-415f-b6dc-88d5d3774944", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2148.016601] env[62684]: DEBUG oslo_concurrency.lockutils [req-56bb2d74-0b48-41d0-bc93-79bc69cfc3a5 req-e0e18376-5f00-4046-beb2-a47528189595 service nova] Releasing lock "refresh_cache-9418b42d-9fff-41fd-92d1-a832017fc9c3" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2148.017335] env[62684]: DEBUG nova.compute.manager [req-56bb2d74-0b48-41d0-bc93-79bc69cfc3a5 req-e0e18376-5f00-4046-beb2-a47528189595 service nova] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Received event network-vif-plugged-9755599a-bf6c-415f-b6dc-88d5d3774944 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2148.017335] env[62684]: DEBUG oslo_concurrency.lockutils [req-56bb2d74-0b48-41d0-bc93-79bc69cfc3a5 req-e0e18376-5f00-4046-beb2-a47528189595 service nova] Acquiring lock "7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2148.018627] env[62684]: DEBUG oslo_concurrency.lockutils [req-56bb2d74-0b48-41d0-bc93-79bc69cfc3a5 req-e0e18376-5f00-4046-beb2-a47528189595 service nova] Lock "7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd-events" acquired 
by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2148.018627] env[62684]: DEBUG oslo_concurrency.lockutils [req-56bb2d74-0b48-41d0-bc93-79bc69cfc3a5 req-e0e18376-5f00-4046-beb2-a47528189595 service nova] Lock "7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2148.018627] env[62684]: DEBUG nova.compute.manager [req-56bb2d74-0b48-41d0-bc93-79bc69cfc3a5 req-e0e18376-5f00-4046-beb2-a47528189595 service nova] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] No waiting events found dispatching network-vif-plugged-9755599a-bf6c-415f-b6dc-88d5d3774944 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2148.018627] env[62684]: WARNING nova.compute.manager [req-56bb2d74-0b48-41d0-bc93-79bc69cfc3a5 req-e0e18376-5f00-4046-beb2-a47528189595 service nova] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Received unexpected event network-vif-plugged-9755599a-bf6c-415f-b6dc-88d5d3774944 for instance with vm_state building and task_state spawning. [ 2148.018627] env[62684]: DEBUG nova.compute.manager [req-56bb2d74-0b48-41d0-bc93-79bc69cfc3a5 req-e0e18376-5f00-4046-beb2-a47528189595 service nova] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Received event network-changed-9755599a-bf6c-415f-b6dc-88d5d3774944 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2148.018627] env[62684]: DEBUG nova.compute.manager [req-56bb2d74-0b48-41d0-bc93-79bc69cfc3a5 req-e0e18376-5f00-4046-beb2-a47528189595 service nova] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Refreshing instance network info cache due to event network-changed-9755599a-bf6c-415f-b6dc-88d5d3774944. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2148.018981] env[62684]: DEBUG oslo_concurrency.lockutils [req-56bb2d74-0b48-41d0-bc93-79bc69cfc3a5 req-e0e18376-5f00-4046-beb2-a47528189595 service nova] Acquiring lock "refresh_cache-7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2148.111935] env[62684]: DEBUG nova.scheduler.client.report [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2148.133042] env[62684]: DEBUG oslo_vmware.api [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529d1959-6384-a8e5-dccb-78287f36bd5e, 'name': SearchDatastore_Task, 'duration_secs': 0.013527} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2148.133042] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2148.133042] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2148.133042] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2148.133042] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2148.133042] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2148.133530] env[62684]: DEBUG oslo_concurrency.lockutils [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2148.134299] env[62684]: DEBUG oslo_concurrency.lockutils [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2148.134538] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-21d4cd38-0119-4df3-af50-a2ef6c507bf8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.137428] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87309d64-462c-4c65-b271-e28333e8ac4b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.144106] env[62684]: DEBUG oslo_vmware.api [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 
tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2148.144106] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529a8752-6ac0-6472-2a76-2a0eacb381c6" [ 2148.144106] env[62684]: _type = "Task" [ 2148.144106] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2148.152716] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2148.154375] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2148.155503] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f653922-8028-4bd6-bddc-0a2508f69b04 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.164294] env[62684]: DEBUG oslo_vmware.api [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529a8752-6ac0-6472-2a76-2a0eacb381c6, 'name': SearchDatastore_Task, 'duration_secs': 0.009773} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2148.165331] env[62684]: DEBUG oslo_concurrency.lockutils [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2148.165722] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2148.166142] env[62684]: DEBUG oslo_concurrency.lockutils [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2148.168606] env[62684]: DEBUG oslo_vmware.api [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2148.168606] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5291b6e9-160b-2810-c24e-e28b101de39d" [ 2148.168606] env[62684]: _type = "Task" [ 2148.168606] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2148.180789] env[62684]: DEBUG oslo_vmware.api [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5291b6e9-160b-2810-c24e-e28b101de39d, 'name': SearchDatastore_Task, 'duration_secs': 0.00868} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2148.182446] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f20f758a-3df8-4d43-afad-ce1e6aa18f4f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.189373] env[62684]: DEBUG oslo_vmware.api [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2148.189373] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520a46f6-33b2-8850-1c20-3e3f9e95b677" [ 2148.189373] env[62684]: _type = "Task" [ 2148.189373] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2148.201623] env[62684]: DEBUG oslo_vmware.api [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520a46f6-33b2-8850-1c20-3e3f9e95b677, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.263982] env[62684]: INFO nova.compute.manager [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Took 21.97 seconds to build instance. 
[ 2148.378189] env[62684]: DEBUG nova.network.neutron [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Updating instance_info_cache with network_info: [{"id": "f121aad3-8e11-4583-8919-c502deebb5e2", "address": "fa:16:3e:08:50:be", "network": {"id": "7678b347-6a54-4b84-9a4d-b566bbeb1ea4", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-51664912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d88ac878d44480b3b54b24ab87efa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf121aad3-8e", "ovs_interfaceid": "f121aad3-8e11-4583-8919-c502deebb5e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2148.399794] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Releasing lock "refresh_cache-7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2148.400222] env[62684]: DEBUG nova.compute.manager [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Instance network_info: |[{"id": "9755599a-bf6c-415f-b6dc-88d5d3774944", "address": "fa:16:3e:1d:6c:e1", "network": {"id": "8eebb0b3-51e4-44c0-a4b4-b45647332a9e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-398889609-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "43304d5c52344bd9841dbc760a174b4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9755599a-bf", "ovs_interfaceid": "9755599a-bf6c-415f-b6dc-88d5d3774944", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2148.400754] env[62684]: DEBUG oslo_concurrency.lockutils 
[req-56bb2d74-0b48-41d0-bc93-79bc69cfc3a5 req-e0e18376-5f00-4046-beb2-a47528189595 service nova] Acquired lock "refresh_cache-7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2148.402052] env[62684]: DEBUG nova.network.neutron [req-56bb2d74-0b48-41d0-bc93-79bc69cfc3a5 req-e0e18376-5f00-4046-beb2-a47528189595 service nova] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Refreshing network info cache for port 9755599a-bf6c-415f-b6dc-88d5d3774944 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2148.403210] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:6c:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9755599a-bf6c-415f-b6dc-88d5d3774944', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2148.410888] env[62684]: DEBUG oslo.service.loopingcall [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2148.414260] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2148.414949] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c206483-0c53-478a-bb8f-7b5934fb6cb0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.436432] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2148.436432] env[62684]: value = "task-2053397" [ 2148.436432] env[62684]: _type = "Task" [ 2148.436432] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2148.444202] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053397, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.622804] env[62684]: DEBUG oslo_concurrency.lockutils [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.857s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2148.629660] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.429s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2148.629660] env[62684]: DEBUG nova.objects.instance [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lazy-loading 'resources' on Instance uuid 025dfe36-1f14-4bda-84a0-d424364b745b {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2148.656364] env[62684]: INFO nova.scheduler.client.report [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Deleted allocations for instance e8c90faa-2c25-4308-9781-80d308b9211c [ 2148.700792] env[62684]: DEBUG oslo_vmware.api [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520a46f6-33b2-8850-1c20-3e3f9e95b677, 'name': SearchDatastore_Task, 'duration_secs': 0.010507} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2148.701173] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2148.704492] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 9418b42d-9fff-41fd-92d1-a832017fc9c3/9418b42d-9fff-41fd-92d1-a832017fc9c3.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2148.704492] env[62684]: DEBUG oslo_concurrency.lockutils [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2148.704492] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2148.704492] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fb25f95f-af65-4cbd-988c-af713d315806 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.705012] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1519d0a-ae92-4363-83e6-dcfee746f4b3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.714110] env[62684]: DEBUG oslo_vmware.api [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2148.714110] env[62684]: value = "task-2053398" [ 2148.714110] env[62684]: _type = "Task" [ 2148.714110] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2148.714663] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2148.714921] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2148.719972] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4372736c-890b-4618-a091-371754ba4feb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.726298] env[62684]: DEBUG oslo_vmware.api [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2148.726298] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52fd1cab-c0d3-6962-d3a8-9c55bac36f78" [ 2148.726298] env[62684]: _type = "Task" [ 2148.726298] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2148.731893] env[62684]: DEBUG oslo_vmware.api [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053398, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.740559] env[62684]: DEBUG oslo_vmware.api [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52fd1cab-c0d3-6962-d3a8-9c55bac36f78, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.744916] env[62684]: DEBUG nova.network.neutron [req-56bb2d74-0b48-41d0-bc93-79bc69cfc3a5 req-e0e18376-5f00-4046-beb2-a47528189595 service nova] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Updated VIF entry in instance network info cache for port 9755599a-bf6c-415f-b6dc-88d5d3774944. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2148.745373] env[62684]: DEBUG nova.network.neutron [req-56bb2d74-0b48-41d0-bc93-79bc69cfc3a5 req-e0e18376-5f00-4046-beb2-a47528189595 service nova] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Updating instance_info_cache with network_info: [{"id": "9755599a-bf6c-415f-b6dc-88d5d3774944", "address": "fa:16:3e:1d:6c:e1", "network": {"id": "8eebb0b3-51e4-44c0-a4b4-b45647332a9e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-398889609-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "43304d5c52344bd9841dbc760a174b4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9755599a-bf", "ovs_interfaceid": "9755599a-bf6c-415f-b6dc-88d5d3774944", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2148.766245] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d9ff4944-b3a9-4c09-bccf-4d3d4848b2ed tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "f037d6b2-2082-4611-985e-b9a077eb8250" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.485s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2148.882039] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Releasing lock "refresh_cache-0a8d7c48-cf90-4baf-a900-38fbd62869a6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2148.947144] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053397, 'name': CreateVM_Task, 'duration_secs': 0.456929} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2148.947402] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2148.948327] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2148.948615] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2148.949233] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2148.949585] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94be754e-8c2e-4e57-9625-18cb27404c2b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.955752] env[62684]: DEBUG oslo_vmware.api [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2148.955752] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]528234c4-4ee6-069c-f244-60752c8f9a90" [ 2148.955752] env[62684]: _type = "Task" [ 2148.955752] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2148.964917] env[62684]: DEBUG oslo_vmware.api [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]528234c4-4ee6-069c-f244-60752c8f9a90, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2149.170626] env[62684]: DEBUG oslo_concurrency.lockutils [None req-86c41030-fb97-4db7-b765-6b280ef43a8c tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "e8c90faa-2c25-4308-9781-80d308b9211c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.405s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2149.226549] env[62684]: DEBUG oslo_vmware.api [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053398, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2149.243024] env[62684]: DEBUG oslo_vmware.api [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52fd1cab-c0d3-6962-d3a8-9c55bac36f78, 'name': SearchDatastore_Task, 'duration_secs': 0.022413} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2149.243024] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72c8cc69-1fa3-4cd3-91dc-be8ee004baef {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.248794] env[62684]: DEBUG oslo_concurrency.lockutils [req-56bb2d74-0b48-41d0-bc93-79bc69cfc3a5 req-e0e18376-5f00-4046-beb2-a47528189595 service nova] Releasing lock "refresh_cache-7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2149.250429] env[62684]: DEBUG oslo_vmware.api [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2149.250429] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522a0e2a-8d37-130b-afb2-4a9698ca418d" [ 2149.250429] env[62684]: _type = "Task" [ 2149.250429] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2149.258619] env[62684]: DEBUG oslo_vmware.api [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522a0e2a-8d37-130b-afb2-4a9698ca418d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2149.428172] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d40ecb-452d-4445-be38-92e1cbd4bab7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.438608] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33444e60-ab06-4377-85e0-0c5103a9ed90 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.484547] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-756eee2c-2347-4157-bb75-d1524873ec2a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.494184] env[62684]: DEBUG oslo_vmware.api [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]528234c4-4ee6-069c-f244-60752c8f9a90, 'name': SearchDatastore_Task, 'duration_secs': 0.01881} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2149.497278] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2149.497996] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2149.498461] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2149.498720] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2149.499114] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2149.499616] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-42b39d77-32b4-4575-ae9e-65dbf2ee91bf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.503383] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91a336e2-12a9-46ab-90fe-1e335e264893 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.521280] env[62684]: DEBUG nova.compute.provider_tree [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2149.524407] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2149.524940] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 
tempest-ServerRescueTestJSON-761077259-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2149.525991] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f518ccc7-40e3-43eb-9a38-220f9ace5f89 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.531819] env[62684]: DEBUG oslo_vmware.api [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2149.531819] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ed2110-2d0e-3a3b-8883-dce98be8aa14" [ 2149.531819] env[62684]: _type = "Task" [ 2149.531819] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2149.541540] env[62684]: DEBUG oslo_vmware.api [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ed2110-2d0e-3a3b-8883-dce98be8aa14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2149.636834] env[62684]: DEBUG nova.compute.manager [req-3b3837bd-c0dd-4ce0-a143-a74d6260cca9 req-9f3933a7-0c14-4fcc-bf88-d82951e033cd service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Received event network-changed-b5747949-00d7-4815-9080-52285a6a8813 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2149.636931] env[62684]: DEBUG nova.compute.manager [req-3b3837bd-c0dd-4ce0-a143-a74d6260cca9 req-9f3933a7-0c14-4fcc-bf88-d82951e033cd service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Refreshing instance network info cache due to event network-changed-b5747949-00d7-4815-9080-52285a6a8813. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2149.637191] env[62684]: DEBUG oslo_concurrency.lockutils [req-3b3837bd-c0dd-4ce0-a143-a74d6260cca9 req-9f3933a7-0c14-4fcc-bf88-d82951e033cd service nova] Acquiring lock "refresh_cache-f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2149.637420] env[62684]: DEBUG oslo_concurrency.lockutils [req-3b3837bd-c0dd-4ce0-a143-a74d6260cca9 req-9f3933a7-0c14-4fcc-bf88-d82951e033cd service nova] Acquired lock "refresh_cache-f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2149.637621] env[62684]: DEBUG nova.network.neutron [req-3b3837bd-c0dd-4ce0-a143-a74d6260cca9 req-9f3933a7-0c14-4fcc-bf88-d82951e033cd service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Refreshing network info cache for port b5747949-00d7-4815-9080-52285a6a8813 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2149.726321] env[62684]: DEBUG oslo_vmware.api [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053398, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.533069} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2149.727749] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 9418b42d-9fff-41fd-92d1-a832017fc9c3/9418b42d-9fff-41fd-92d1-a832017fc9c3.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2149.728288] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2149.728802] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5ca20003-48e7-4fce-8689-a7d5fb1fa94e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.737616] env[62684]: DEBUG oslo_vmware.api [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2149.737616] env[62684]: value = "task-2053399" [ 2149.737616] env[62684]: _type = "Task" [ 2149.737616] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2149.750386] env[62684]: DEBUG oslo_vmware.api [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053399, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2149.761995] env[62684]: DEBUG oslo_vmware.api [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522a0e2a-8d37-130b-afb2-4a9698ca418d, 'name': SearchDatastore_Task, 'duration_secs': 0.011118} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2149.762349] env[62684]: DEBUG oslo_concurrency.lockutils [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2149.762664] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] aebbc2cc-8973-4907-9ec8-085027fd7ca3/aebbc2cc-8973-4907-9ec8-085027fd7ca3.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2149.762980] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d5788498-e0eb-4823-8e4d-14db63f4bc4b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.771533] env[62684]: DEBUG oslo_vmware.api [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2149.771533] env[62684]: value = "task-2053400" [ 2149.771533] env[62684]: _type = "Task" [ 2149.771533] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2149.781604] env[62684]: DEBUG oslo_vmware.api [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053400, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2150.008835] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquiring lock "7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2150.009106] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2150.027549] env[62684]: DEBUG nova.scheduler.client.report [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2150.045408] env[62684]: DEBUG oslo_vmware.api [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ed2110-2d0e-3a3b-8883-dce98be8aa14, 'name': SearchDatastore_Task, 'duration_secs': 0.015904} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2150.046432] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c220fd6-8e95-44cc-84ca-ba12382c9f64 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.053965] env[62684]: DEBUG oslo_vmware.api [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2150.053965] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5281ef93-9dc0-c2bd-39e9-d25c675376bd" [ 2150.053965] env[62684]: _type = "Task" [ 2150.053965] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2150.064674] env[62684]: DEBUG oslo_vmware.api [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5281ef93-9dc0-c2bd-39e9-d25c675376bd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2150.253628] env[62684]: DEBUG oslo_vmware.api [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053399, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091827} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2150.254050] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2150.254995] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bf9ffd0-1de9-4155-8198-0ded9d1a65af {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.285659] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 9418b42d-9fff-41fd-92d1-a832017fc9c3/9418b42d-9fff-41fd-92d1-a832017fc9c3.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2150.287287] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de4c5622-cb17-4f0a-a346-1b84fd56298f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.320063] env[62684]: DEBUG oslo_vmware.api [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053400, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2150.326659] env[62684]: DEBUG oslo_vmware.api [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2150.326659] env[62684]: value = "task-2053401" [ 2150.326659] env[62684]: _type = "Task" [ 2150.326659] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2150.339661] env[62684]: DEBUG oslo_vmware.api [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053401, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2150.409226] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b50144d-931f-43be-9b8d-123c7395e090 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.429302] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Updating instance '0a8d7c48-cf90-4baf-a900-38fbd62869a6' progress to 0 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2150.501872] env[62684]: DEBUG nova.network.neutron [req-3b3837bd-c0dd-4ce0-a143-a74d6260cca9 req-9f3933a7-0c14-4fcc-bf88-d82951e033cd service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Updated VIF entry in instance network info cache for port b5747949-00d7-4815-9080-52285a6a8813. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2150.502284] env[62684]: DEBUG nova.network.neutron [req-3b3837bd-c0dd-4ce0-a143-a74d6260cca9 req-9f3933a7-0c14-4fcc-bf88-d82951e033cd service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Updating instance_info_cache with network_info: [{"id": "b5747949-00d7-4815-9080-52285a6a8813", "address": "fa:16:3e:fd:34:0c", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5747949-00", "ovs_interfaceid": "b5747949-00d7-4815-9080-52285a6a8813", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2150.512687] env[62684]: DEBUG nova.compute.manager [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2150.533092] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.904s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2150.559766] env[62684]: INFO nova.scheduler.client.report [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Deleted allocations for instance 025dfe36-1f14-4bda-84a0-d424364b745b [ 2150.567508] env[62684]: DEBUG oslo_vmware.api [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5281ef93-9dc0-c2bd-39e9-d25c675376bd, 'name': SearchDatastore_Task, 'duration_secs': 0.022618} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2150.568093] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2150.568388] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd/7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2150.568680] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-045e7ff9-576c-45b3-bedf-678ae6c1a055 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.575571] env[62684]: DEBUG oslo_vmware.api [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2150.575571] env[62684]: value = "task-2053402" [ 2150.575571] env[62684]: _type = "Task" [ 2150.575571] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2150.584701] env[62684]: DEBUG oslo_vmware.api [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053402, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2150.661184] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "d7f09d0e-f7b6-415e-8d82-47eba1153aa1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2150.661600] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "d7f09d0e-f7b6-415e-8d82-47eba1153aa1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2150.783804] env[62684]: DEBUG oslo_vmware.api [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053400, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.579789} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2150.784132] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] aebbc2cc-8973-4907-9ec8-085027fd7ca3/aebbc2cc-8973-4907-9ec8-085027fd7ca3.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2150.784373] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2150.784651] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-176f1ea0-4b2d-4390-a862-ccaff1e967fd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.793231] env[62684]: DEBUG oslo_vmware.api [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2150.793231] env[62684]: value = "task-2053403" [ 2150.793231] env[62684]: _type = "Task" [ 2150.793231] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2150.802390] env[62684]: DEBUG oslo_vmware.api [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053403, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2150.836687] env[62684]: DEBUG oslo_vmware.api [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053401, 'name': ReconfigVM_Task, 'duration_secs': 0.321664} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2150.837012] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 9418b42d-9fff-41fd-92d1-a832017fc9c3/9418b42d-9fff-41fd-92d1-a832017fc9c3.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2150.837726] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b75c0d6a-61b6-4919-b660-52c1e3743a79 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.844599] env[62684]: DEBUG oslo_vmware.api [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2150.844599] env[62684]: value = "task-2053404" [ 2150.844599] env[62684]: _type = "Task" [ 2150.844599] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2150.853483] env[62684]: DEBUG oslo_vmware.api [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053404, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2150.935296] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2150.936025] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ee052a75-6653-4afe-bd94-97c44346aad3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.949317] env[62684]: DEBUG oslo_vmware.api [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2150.949317] env[62684]: value = "task-2053405" [ 2150.949317] env[62684]: _type = "Task" [ 2150.949317] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2150.960985] env[62684]: DEBUG oslo_vmware.api [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053405, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2151.005556] env[62684]: DEBUG oslo_concurrency.lockutils [req-3b3837bd-c0dd-4ce0-a143-a74d6260cca9 req-9f3933a7-0c14-4fcc-bf88-d82951e033cd service nova] Releasing lock "refresh_cache-f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2151.034314] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2151.034596] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2151.036386] env[62684]: INFO nova.compute.claims [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2151.072413] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dbd2a3fb-2de9-40b6-928b-e7e57e21bdad tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "025dfe36-1f14-4bda-84a0-d424364b745b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.819s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2151.086624] env[62684]: DEBUG oslo_vmware.api [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053402, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2151.164055] env[62684]: DEBUG nova.compute.manager [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2151.304250] env[62684]: DEBUG oslo_vmware.api [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053403, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076352} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2151.304770] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2151.305497] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04b29929-4f4a-480d-8ce4-a1111be63d53 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.329612] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] aebbc2cc-8973-4907-9ec8-085027fd7ca3/aebbc2cc-8973-4907-9ec8-085027fd7ca3.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2151.329934] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cfe2e323-69d3-4273-9418-53e9517135d4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.350240] env[62684]: DEBUG oslo_vmware.api [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2151.350240] env[62684]: value = "task-2053406" [ 2151.350240] env[62684]: _type = "Task" [ 2151.350240] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2151.353483] env[62684]: DEBUG oslo_vmware.api [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053404, 'name': Rename_Task, 'duration_secs': 0.262791} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2151.356365] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2151.356612] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-633309c7-55ec-43ba-b891-dfd5ecd650e1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.363016] env[62684]: DEBUG oslo_vmware.api [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053406, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2151.364128] env[62684]: DEBUG oslo_vmware.api [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2151.364128] env[62684]: value = "task-2053407" [ 2151.364128] env[62684]: _type = "Task" [ 2151.364128] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2151.382036] env[62684]: DEBUG oslo_vmware.api [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053407, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2151.459396] env[62684]: DEBUG oslo_vmware.api [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053405, 'name': PowerOffVM_Task, 'duration_secs': 0.293645} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2151.459694] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2151.459887] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Updating instance '0a8d7c48-cf90-4baf-a900-38fbd62869a6' progress to 17 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2151.586249] env[62684]: DEBUG oslo_vmware.api [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053402, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.53476} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2151.586582] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd/7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2151.586880] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2151.587196] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e2da5f79-5b5f-42da-b624-d4463af69b03 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.594185] env[62684]: DEBUG oslo_vmware.api [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2151.594185] env[62684]: value = "task-2053408" [ 2151.594185] env[62684]: _type = "Task" [ 2151.594185] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2151.602815] env[62684]: DEBUG oslo_vmware.api [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053408, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2151.689662] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2151.863778] env[62684]: DEBUG oslo_vmware.api [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053406, 'name': ReconfigVM_Task, 'duration_secs': 0.354093} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2151.864092] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Reconfigured VM instance instance-0000005a to attach disk [datastore1] aebbc2cc-8973-4907-9ec8-085027fd7ca3/aebbc2cc-8973-4907-9ec8-085027fd7ca3.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2151.864868] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-19e27333-d46d-4205-9202-ff2392e11f1a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.875548] env[62684]: DEBUG oslo_vmware.api [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053407, 'name': PowerOnVM_Task, 'duration_secs': 0.475818} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2151.877176] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2151.877235] env[62684]: INFO nova.compute.manager [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Took 13.87 seconds to spawn the instance on the hypervisor. [ 2151.877829] env[62684]: DEBUG nova.compute.manager [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2151.877924] env[62684]: DEBUG oslo_vmware.api [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2151.877924] env[62684]: value = "task-2053409" [ 2151.877924] env[62684]: _type = "Task" [ 2151.877924] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2151.878646] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d574849-1918-4c60-bf82-7724a99d7786 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.890464] env[62684]: DEBUG oslo_vmware.api [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053409, 'name': Rename_Task} progress is 10%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2151.967184] env[62684]: DEBUG nova.virt.hardware [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2151.967366] env[62684]: DEBUG nova.virt.hardware [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2151.967615] env[62684]: DEBUG nova.virt.hardware [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2151.968455] env[62684]: DEBUG nova.virt.hardware [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2151.968739] env[62684]: DEBUG nova.virt.hardware [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2151.968947] env[62684]: DEBUG nova.virt.hardware [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2151.969223] env[62684]: DEBUG nova.virt.hardware [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2151.969498] env[62684]: DEBUG nova.virt.hardware [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2151.969743] env[62684]: DEBUG nova.virt.hardware [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 
tempest-ServerDiskConfigTestJSON-2081969978-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2151.969964] env[62684]: DEBUG nova.virt.hardware [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2151.970222] env[62684]: DEBUG nova.virt.hardware [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2151.975585] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a92dda41-c1c8-4581-84d9-385165750db8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.992713] env[62684]: DEBUG oslo_vmware.api [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2151.992713] env[62684]: value = "task-2053410" [ 2151.992713] env[62684]: _type = "Task" [ 2151.992713] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2152.002658] env[62684]: DEBUG oslo_vmware.api [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053410, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2152.103970] env[62684]: DEBUG oslo_vmware.api [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053408, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071219} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2152.104342] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2152.105192] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad6fe90b-aeb7-4e4d-9601-5bab458918cf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2152.132709] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd/7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2152.136855] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b87c6c7-ef6f-45af-b285-6a6dc2094a3e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2152.163972] env[62684]: DEBUG oslo_vmware.api [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2152.163972] env[62684]: value = "task-2053411" [ 2152.163972] env[62684]: _type = "Task" [ 2152.163972] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2152.173764] env[62684]: DEBUG oslo_vmware.api [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053411, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2152.339423] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49f19cf1-1984-4c75-adf0-28282ab46a99 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2152.349727] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4eeaddc-118d-476b-8ff6-e9466b1c1e57 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2152.381404] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79ccd924-2045-4b17-be80-41b9eed2ceb4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2152.392617] env[62684]: DEBUG oslo_vmware.api [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053409, 'name': Rename_Task, 'duration_secs': 0.200795} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2152.394727] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2152.395055] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-61c6007f-79ed-4755-9ecf-f14aa5e5c9b7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2152.397592] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84c1f8c6-9ee4-45ab-9a58-7bb6a9413138 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2152.416575] env[62684]: DEBUG nova.compute.provider_tree [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2152.418274] env[62684]: INFO nova.compute.manager [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Took 24.71 seconds to build instance. [ 2152.420486] env[62684]: DEBUG oslo_vmware.api [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2152.420486] env[62684]: value = "task-2053412" [ 2152.420486] env[62684]: _type = "Task" [ 2152.420486] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2152.429795] env[62684]: DEBUG oslo_vmware.api [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053412, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2152.506026] env[62684]: DEBUG oslo_vmware.api [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053410, 'name': ReconfigVM_Task, 'duration_secs': 0.385845} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2152.506026] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Updating instance '0a8d7c48-cf90-4baf-a900-38fbd62869a6' progress to 33 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2152.674918] env[62684]: DEBUG oslo_vmware.api [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053411, 'name': ReconfigVM_Task, 'duration_secs': 0.487121} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2152.675370] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Reconfigured VM instance instance-0000005b to attach disk [datastore2] 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd/7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2152.676039] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-98aeb551-c550-4bf8-a9c1-800a93475d18 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2152.682268] env[62684]: DEBUG oslo_vmware.api [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2152.682268] env[62684]: value = "task-2053413" [ 2152.682268] env[62684]: _type = "Task" [ 2152.682268] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2152.690488] env[62684]: DEBUG oslo_vmware.api [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053413, 'name': Rename_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2152.923041] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4fd810d6-877b-4d24-803f-676f55f74b78 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "9418b42d-9fff-41fd-92d1-a832017fc9c3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.220s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2152.924064] env[62684]: DEBUG nova.scheduler.client.report [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2152.938407] env[62684]: DEBUG oslo_vmware.api [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053412, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2153.011112] env[62684]: DEBUG nova.virt.hardware [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2153.011283] env[62684]: DEBUG nova.virt.hardware [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2153.011571] env[62684]: DEBUG nova.virt.hardware [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2153.011819] env[62684]: DEBUG nova.virt.hardware [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 
2153.012036] env[62684]: DEBUG nova.virt.hardware [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2153.012281] env[62684]: DEBUG nova.virt.hardware [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2153.012575] env[62684]: DEBUG nova.virt.hardware [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2153.012775] env[62684]: DEBUG nova.virt.hardware [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2153.012992] env[62684]: DEBUG nova.virt.hardware [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2153.013253] env[62684]: DEBUG nova.virt.hardware [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2153.013520] env[62684]: DEBUG nova.virt.hardware [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2153.021180] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Reconfiguring VM instance instance-00000057 to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2153.021619] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41cc6ef1-df27-4067-be1d-ec81ef7d2f12 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.046044] env[62684]: DEBUG oslo_vmware.api [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2153.046044] env[62684]: value = "task-2053414" [ 2153.046044] env[62684]: _type = "Task" [ 2153.046044] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2153.055602] env[62684]: DEBUG oslo_vmware.api [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053414, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2153.128111] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "9418b42d-9fff-41fd-92d1-a832017fc9c3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2153.128588] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "9418b42d-9fff-41fd-92d1-a832017fc9c3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2153.129104] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "9418b42d-9fff-41fd-92d1-a832017fc9c3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2153.129216] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "9418b42d-9fff-41fd-92d1-a832017fc9c3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2153.129474] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "9418b42d-9fff-41fd-92d1-a832017fc9c3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2153.132375] env[62684]: INFO nova.compute.manager [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Terminating instance [ 2153.134913] env[62684]: DEBUG nova.compute.manager [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2153.135187] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2153.136422] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af6421d9-d8c0-4248-aee7-9d5e7e377494 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.147141] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2153.147481] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a9111370-3fd3-4a98-9ddc-84491e81f4dc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.154652] env[62684]: DEBUG oslo_vmware.api [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2153.154652] env[62684]: value = "task-2053415" [ 2153.154652] env[62684]: _type = "Task" [ 2153.154652] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2153.165525] env[62684]: DEBUG oslo_vmware.api [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053415, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2153.191833] env[62684]: DEBUG oslo_vmware.api [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053413, 'name': Rename_Task, 'duration_secs': 0.203435} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2153.192177] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2153.192885] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9c48da50-f88e-4fb4-853d-3cabc704dad2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.200284] env[62684]: DEBUG oslo_vmware.api [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2153.200284] env[62684]: value = "task-2053416" [ 2153.200284] env[62684]: _type = "Task" [ 2153.200284] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2153.208628] env[62684]: DEBUG oslo_vmware.api [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053416, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2153.432997] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.398s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2153.433625] env[62684]: DEBUG nova.compute.manager [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2153.441039] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.751s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2153.441752] env[62684]: INFO nova.compute.claims [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2153.444281] env[62684]: DEBUG oslo_vmware.api [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053412, 'name': PowerOnVM_Task, 'duration_secs': 0.656523} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2153.444759] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2153.445083] env[62684]: INFO nova.compute.manager [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Took 12.11 seconds to spawn the instance on the hypervisor. 
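Aside: the "Acquiring lock" / "acquired by" / "released by" entries above (the "compute_resources" lock held for 2.398s while another claim waited 1.751s) are emitted by oslo.concurrency's lockutils wrapper around the resource tracker's claim path. A minimal sketch of how such a critical section is typically declared follows; the class and method body are illustrative stand-ins, not Nova's actual resource tracker code.

    from oslo_concurrency import lockutils

    # Same lock name as in the log above. Every claim or release of host
    # resources funnels through one semaphore, which is why a new claim can
    # sit waiting ("waited 1.751s") behind a request that is still holding
    # the lock ("held 2.398s").
    COMPUTE_RESOURCES = "compute_resources"

    class ToyResourceTracker(object):

        @lockutils.synchronized(COMPUTE_RESOURCES)
        def instance_claim(self, instance_uuid, vcpus, memory_mb):
            # Runs with the lock held; lockutils itself emits the
            # "Acquiring lock" / "acquired by" / "released by" DEBUG lines
            # along with the waited/held timings seen in this log.
            return {"instance": instance_uuid,
                    "vcpus": vcpus,
                    "memory_mb": memory_mb}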
[ 2153.445170] env[62684]: DEBUG nova.compute.manager [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2153.446165] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1908071-bab0-4205-825c-cfc0d2269900 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.556087] env[62684]: DEBUG oslo_vmware.api [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053414, 'name': ReconfigVM_Task, 'duration_secs': 0.186725} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2153.556353] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Reconfigured VM instance instance-00000057 to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2153.557132] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49f88a33-2461-449b-bb6c-acf1d27f11ad {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.579996] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] 0a8d7c48-cf90-4baf-a900-38fbd62869a6/0a8d7c48-cf90-4baf-a900-38fbd62869a6.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2153.580319] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-12a0bdc7-9c60-4317-ac46-f81303c2bfbd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.599302] env[62684]: DEBUG oslo_vmware.api [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2153.599302] env[62684]: value = "task-2053417" [ 2153.599302] env[62684]: _type = "Task" [ 2153.599302] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2153.611597] env[62684]: DEBUG oslo_vmware.api [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053417, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2153.668257] env[62684]: DEBUG oslo_vmware.api [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053415, 'name': PowerOffVM_Task, 'duration_secs': 0.198162} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2153.668257] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2153.668257] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2153.668257] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5d23dc6f-0385-4359-ae89-16a9a046dc17 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.713314] env[62684]: DEBUG oslo_vmware.api [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053416, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2153.787774] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "584845d2-d146-42bf-8ef5-58532fe24f65" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2153.787908] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "584845d2-d146-42bf-8ef5-58532fe24f65" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2153.814163] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2153.814400] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2153.814594] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Deleting the datastore file [datastore1] 9418b42d-9fff-41fd-92d1-a832017fc9c3 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2153.814940] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-47b2c696-6879-401e-bdc6-8a05f686fdda {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.822163] env[62684]: DEBUG oslo_vmware.api [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2153.822163] env[62684]: value = "task-2053419" [ 2153.822163] env[62684]: _type = "Task" [ 2153.822163] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2153.830427] env[62684]: DEBUG oslo_vmware.api [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053419, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2153.947570] env[62684]: DEBUG nova.compute.utils [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2153.951333] env[62684]: DEBUG nova.compute.manager [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2153.951510] env[62684]: DEBUG nova.network.neutron [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2153.961971] env[62684]: INFO nova.compute.manager [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Took 22.31 seconds to build instance. 
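Aside: the teardown recorded above (power off, UnregisterVM, then DeleteDatastoreFile_Task tracked as task-2053419) uses the same invoke-and-poll pattern as every other vSphere task in this run. The sketch below approximates the datastore-file deletion step through oslo.vmware's public session API; it assumes an already-authenticated session and an existing datacenter reference, and it is a simplified illustration rather than Nova's ds_util code.

    from oslo_vmware import exceptions as vmware_exc

    def delete_datastore_file(session, dc_ref, ds_path):
        """Invoke FileManager.DeleteDatastoreFile_Task and block on the result.

        ``session`` is assumed to be an oslo_vmware.api.VMwareAPISession;
        ``ds_path`` is a datastore path such as
        "[datastore1] 9418b42d-9fff-41fd-92d1-a832017fc9c3".
        """
        file_manager = session.vim.service_content.fileManager
        task_ref = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                      file_manager,
                                      name=ds_path,
                                      datacenter=dc_ref)
        try:
            # wait_for_task() polls the server-side task state; the repeated
            # "Waiting for the task" / "_poll_task ... progress is N%" entries
            # in this log come from that loop.
            session.wait_for_task(task_ref)
        except vmware_exc.FileNotFoundException:
            # Treat an already-missing file as a successful delete here.
            pass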
[ 2154.031212] env[62684]: DEBUG nova.policy [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fe6c69e19eaa46978e4fe25513f42c0d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '359bcaa2eeb64bcbb6602062777b852e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2154.110074] env[62684]: DEBUG oslo_vmware.api [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053417, 'name': ReconfigVM_Task, 'duration_secs': 0.461128} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2154.110389] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Reconfigured VM instance instance-00000057 to attach disk [datastore1] 0a8d7c48-cf90-4baf-a900-38fbd62869a6/0a8d7c48-cf90-4baf-a900-38fbd62869a6.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2154.110709] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Updating instance '0a8d7c48-cf90-4baf-a900-38fbd62869a6' progress to 50 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2154.210499] env[62684]: DEBUG oslo_vmware.api [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053416, 'name': PowerOnVM_Task, 'duration_secs': 0.713237} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2154.210794] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2154.211015] env[62684]: INFO nova.compute.manager [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Took 9.24 seconds to spawn the instance on the hypervisor. 
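Aside: the "Policy check for network:attach_external_network failed" entry at the top of this stretch is a routine oslo.policy evaluation, not an error; a member/reader token simply does not satisfy the admin-oriented rule, and the port for this instance is still created successfully a few entries below. A rough, standalone approximation of that check follows; the 'is_admin:True' check string is an assumption for illustration and may not match Nova's real default.

    from oslo_config import cfg
    from oslo_policy import policy

    cfg.CONF([], project='example')        # minimal bootstrap for a standalone run
    enforcer = policy.Enforcer(cfg.CONF)

    # Assumed default for the example; the real rule lives in Nova's policy
    # definitions.
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'is_admin:True'))

    creds = {'roles': ['member', 'reader'],
             'is_admin': False,
             'project_id': '359bcaa2eeb64bcbb6602062777b852e'}

    # Evaluates to False for this member/reader token, which is what the
    # DEBUG line above records before the request carries on normally.
    print(enforcer.enforce('network:attach_external_network', {}, creds))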
[ 2154.211205] env[62684]: DEBUG nova.compute.manager [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2154.212048] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae95006a-1761-4737-8a0d-78dac5738715 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.290823] env[62684]: DEBUG nova.compute.manager [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2154.335145] env[62684]: DEBUG oslo_vmware.api [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053419, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.242121} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2154.335261] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2154.335938] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2154.335938] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2154.335938] env[62684]: INFO nova.compute.manager [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Took 1.20 seconds to destroy the instance on the hypervisor. [ 2154.336076] env[62684]: DEBUG oslo.service.loopingcall [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2154.336254] env[62684]: DEBUG nova.compute.manager [-] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2154.336373] env[62684]: DEBUG nova.network.neutron [-] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2154.388235] env[62684]: DEBUG nova.network.neutron [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Successfully created port: 4be5b16b-2fb8-4e61-a763-f521de30fe81 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2154.452649] env[62684]: DEBUG nova.compute.manager [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2154.464670] env[62684]: DEBUG oslo_concurrency.lockutils [None req-89f2f18b-28e3-40fc-8130-00344bc453f9 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "aebbc2cc-8973-4907-9ec8-085027fd7ca3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.820s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2154.623489] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d2347b-7f18-4cbc-aaa8-4de530727434 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.649239] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b67891d-c85f-48c2-98c5-542e55c34a77 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.672587] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Updating instance '0a8d7c48-cf90-4baf-a900-38fbd62869a6' progress to 67 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2154.731879] env[62684]: INFO nova.compute.manager [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Took 19.65 seconds to build instance. 
[ 2154.760998] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9031217-cd5d-4045-ada2-6f0c97e1a102 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.772019] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-534da198-e87a-4755-b1de-1cd7f496923c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.804490] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eaf1cfc-52cd-4219-9579-aa7c5cd01262 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.808212] env[62684]: DEBUG nova.compute.manager [req-dd49be90-e521-43bd-b05d-51cb94f5062f req-8fed32cc-85f1-464e-96a4-e57941ac4dc9 service nova] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Received event network-vif-deleted-866e13c3-fdd0-4c15-97db-5002d50eabeb {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2154.808446] env[62684]: INFO nova.compute.manager [req-dd49be90-e521-43bd-b05d-51cb94f5062f req-8fed32cc-85f1-464e-96a4-e57941ac4dc9 service nova] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Neutron deleted interface 866e13c3-fdd0-4c15-97db-5002d50eabeb; detaching it from the instance and deleting it from the info cache [ 2154.808639] env[62684]: DEBUG nova.network.neutron [req-dd49be90-e521-43bd-b05d-51cb94f5062f req-8fed32cc-85f1-464e-96a4-e57941ac4dc9 service nova] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2154.818842] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b33f0649-1daa-4421-a0b9-2e688dc26b3f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.824821] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2154.837439] env[62684]: DEBUG nova.compute.provider_tree [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2155.097067] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1b42dd61-ffd5-49fa-89ce-b58bf26463a0 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "aebbc2cc-8973-4907-9ec8-085027fd7ca3" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2155.097337] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1b42dd61-ffd5-49fa-89ce-b58bf26463a0 tempest-DeleteServersTestJSON-370305399 
tempest-DeleteServersTestJSON-370305399-project-member] Lock "aebbc2cc-8973-4907-9ec8-085027fd7ca3" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2155.228653] env[62684]: DEBUG nova.network.neutron [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Port f121aad3-8e11-4583-8919-c502deebb5e2 binding to destination host cpu-1 is already ACTIVE {{(pid=62684) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2155.234201] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71379fa2-a79a-4159-9449-2920b71b4365 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Lock "7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.162s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2155.265023] env[62684]: DEBUG nova.network.neutron [-] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2155.314053] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5ab7521b-0b88-4350-80dc-11e4471839f4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.323156] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc4a805e-6bbb-45b9-974a-dfc7b49d6325 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.340301] env[62684]: DEBUG nova.scheduler.client.report [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2155.355189] env[62684]: DEBUG nova.compute.manager [req-dd49be90-e521-43bd-b05d-51cb94f5062f req-8fed32cc-85f1-464e-96a4-e57941ac4dc9 service nova] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Detach interface failed, port_id=866e13c3-fdd0-4c15-97db-5002d50eabeb, reason: Instance 9418b42d-9fff-41fd-92d1-a832017fc9c3 could not be found. 
{{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2155.356802] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.917s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2155.357295] env[62684]: DEBUG nova.compute.manager [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2155.359910] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.535s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2155.361402] env[62684]: INFO nova.compute.claims [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2155.465485] env[62684]: DEBUG nova.compute.manager [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2155.486233] env[62684]: DEBUG nova.virt.hardware [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2155.486545] env[62684]: DEBUG nova.virt.hardware [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2155.486713] env[62684]: DEBUG nova.virt.hardware [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2155.486904] env[62684]: DEBUG nova.virt.hardware [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2155.487069] env[62684]: DEBUG nova.virt.hardware [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2155.487227] env[62684]: DEBUG nova.virt.hardware [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2155.487435] env[62684]: DEBUG nova.virt.hardware [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2155.487603] env[62684]: DEBUG nova.virt.hardware [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2155.487785] env[62684]: DEBUG nova.virt.hardware [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2155.487969] env[62684]: DEBUG nova.virt.hardware [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2155.488178] env[62684]: DEBUG nova.virt.hardware [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2155.489099] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f5f220c-506e-49ba-9c96-b2c424ae764f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.499711] env[62684]: DEBUG oslo_vmware.rw_handles [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520a84c4-3c81-344d-cd95-11dfad162211/disk-0.vmdk. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2155.500969] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d7057f-9e34-4d00-8fd2-4baeb52ba4ce {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.506287] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3746f9-ce9d-4095-adbf-7a921c19c793 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.512107] env[62684]: DEBUG oslo_vmware.rw_handles [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520a84c4-3c81-344d-cd95-11dfad162211/disk-0.vmdk is in state: ready. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2155.512279] env[62684]: ERROR oslo_vmware.rw_handles [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520a84c4-3c81-344d-cd95-11dfad162211/disk-0.vmdk due to incomplete transfer. 
[ 2155.521089] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-2afbf20d-d9ee-4209-b1e9-5b53d450c668 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.527866] env[62684]: DEBUG oslo_vmware.rw_handles [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520a84c4-3c81-344d-cd95-11dfad162211/disk-0.vmdk. {{(pid=62684) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2155.528077] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Uploaded image b5ede0c6-ad0d-4c75-b005-a332dfdc71df to the Glance image server {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2155.530456] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Destroying the VM {{(pid=62684) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2155.530712] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-50ccfdb6-9f25-4f08-aea0-cfda32822cf9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.536877] env[62684]: DEBUG oslo_vmware.api [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2155.536877] env[62684]: value = "task-2053420" [ 2155.536877] env[62684]: _type = "Task" [ 2155.536877] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2155.545123] env[62684]: INFO nova.compute.manager [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Rescuing [ 2155.545362] env[62684]: DEBUG oslo_concurrency.lockutils [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquiring lock "refresh_cache-7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2155.545525] env[62684]: DEBUG oslo_concurrency.lockutils [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquired lock "refresh_cache-7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2155.545698] env[62684]: DEBUG nova.network.neutron [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2155.546952] env[62684]: DEBUG oslo_vmware.api [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053420, 'name': Destroy_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2155.602725] env[62684]: DEBUG nova.compute.utils [None req-1b42dd61-ffd5-49fa-89ce-b58bf26463a0 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2155.767326] env[62684]: INFO nova.compute.manager [-] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Took 1.43 seconds to deallocate network for instance. [ 2155.868399] env[62684]: DEBUG nova.compute.utils [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2155.870155] env[62684]: DEBUG nova.compute.manager [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2155.870323] env[62684]: DEBUG nova.network.neutron [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2155.937438] env[62684]: DEBUG nova.policy [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e957449ae9d24bdaba38b3db704d3d61', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5cb4900a999e467bafdfd1fb407a82f4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2156.047103] env[62684]: DEBUG oslo_vmware.api [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053420, 'name': Destroy_Task, 'duration_secs': 0.495772} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2156.047543] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Destroyed the VM [ 2156.047833] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Deleting Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2156.050189] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-19a56f5b-45ea-456f-97f7-177f2353f4cd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2156.060267] env[62684]: DEBUG oslo_vmware.api [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2156.060267] env[62684]: value = "task-2053421" [ 2156.060267] env[62684]: _type = "Task" [ 2156.060267] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2156.068927] env[62684]: DEBUG oslo_vmware.api [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053421, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2156.106013] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1b42dd61-ffd5-49fa-89ce-b58bf26463a0 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "aebbc2cc-8973-4907-9ec8-085027fd7ca3" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2156.253208] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "0a8d7c48-cf90-4baf-a900-38fbd62869a6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2156.253662] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "0a8d7c48-cf90-4baf-a900-38fbd62869a6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2156.253942] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "0a8d7c48-cf90-4baf-a900-38fbd62869a6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2156.264354] env[62684]: DEBUG nova.network.neutron [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Updating instance_info_cache with network_info: [{"id": "9755599a-bf6c-415f-b6dc-88d5d3774944", "address": "fa:16:3e:1d:6c:e1", "network": {"id": "8eebb0b3-51e4-44c0-a4b4-b45647332a9e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-398889609-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "43304d5c52344bd9841dbc760a174b4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9755599a-bf", "ovs_interfaceid": "9755599a-bf6c-415f-b6dc-88d5d3774944", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2156.274950] env[62684]: DEBUG 
oslo_concurrency.lockutils [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2156.375764] env[62684]: DEBUG nova.compute.manager [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2156.443447] env[62684]: DEBUG nova.network.neutron [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Successfully updated port: 4be5b16b-2fb8-4e61-a763-f521de30fe81 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2156.565576] env[62684]: DEBUG nova.network.neutron [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Successfully created port: efda8f4d-97b4-44f8-b30b-d26145e98e58 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2156.575876] env[62684]: DEBUG oslo_vmware.api [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053421, 'name': RemoveSnapshot_Task, 'duration_secs': 0.354331} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2156.576791] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Deleted Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2156.578378] env[62684]: DEBUG nova.compute.manager [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2156.578378] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8af6821-cf2f-4b10-a88b-35e4aefb45a2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2156.642462] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6099b5c-974b-494c-be46-2c6a1bcd3409 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2156.652014] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57aefddb-99f6-452c-b2f0-7efeddc27817 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2156.690611] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7216295b-4b89-4635-80e0-0bf8adbd14b9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2156.703156] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed633802-ae6d-46ff-b886-735580c7374d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2156.719556] env[62684]: DEBUG nova.compute.provider_tree [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2156.767357] env[62684]: DEBUG oslo_concurrency.lockutils [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Releasing lock "refresh_cache-7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2156.947355] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquiring lock "refresh_cache-7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2156.947355] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 
tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquired lock "refresh_cache-7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2156.947355] env[62684]: DEBUG nova.network.neutron [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2157.053799] env[62684]: DEBUG nova.compute.manager [req-837fc913-a615-4628-878a-6f4cce86cd22 req-b65f2bf7-4e62-448c-ab63-6d4313487f48 service nova] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Received event network-vif-plugged-4be5b16b-2fb8-4e61-a763-f521de30fe81 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2157.054038] env[62684]: DEBUG oslo_concurrency.lockutils [req-837fc913-a615-4628-878a-6f4cce86cd22 req-b65f2bf7-4e62-448c-ab63-6d4313487f48 service nova] Acquiring lock "7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2157.054256] env[62684]: DEBUG oslo_concurrency.lockutils [req-837fc913-a615-4628-878a-6f4cce86cd22 req-b65f2bf7-4e62-448c-ab63-6d4313487f48 service nova] Lock "7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2157.054430] env[62684]: DEBUG oslo_concurrency.lockutils [req-837fc913-a615-4628-878a-6f4cce86cd22 req-b65f2bf7-4e62-448c-ab63-6d4313487f48 service nova] Lock "7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2157.054601] env[62684]: DEBUG nova.compute.manager [req-837fc913-a615-4628-878a-6f4cce86cd22 req-b65f2bf7-4e62-448c-ab63-6d4313487f48 service nova] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] No waiting events found dispatching network-vif-plugged-4be5b16b-2fb8-4e61-a763-f521de30fe81 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2157.054780] env[62684]: WARNING nova.compute.manager [req-837fc913-a615-4628-878a-6f4cce86cd22 req-b65f2bf7-4e62-448c-ab63-6d4313487f48 service nova] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Received unexpected event network-vif-plugged-4be5b16b-2fb8-4e61-a763-f521de30fe81 for instance with vm_state building and task_state spawning. 
[ 2157.054945] env[62684]: DEBUG nova.compute.manager [req-837fc913-a615-4628-878a-6f4cce86cd22 req-b65f2bf7-4e62-448c-ab63-6d4313487f48 service nova] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Received event network-changed-4be5b16b-2fb8-4e61-a763-f521de30fe81 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2157.055117] env[62684]: DEBUG nova.compute.manager [req-837fc913-a615-4628-878a-6f4cce86cd22 req-b65f2bf7-4e62-448c-ab63-6d4313487f48 service nova] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Refreshing instance network info cache due to event network-changed-4be5b16b-2fb8-4e61-a763-f521de30fe81. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2157.055287] env[62684]: DEBUG oslo_concurrency.lockutils [req-837fc913-a615-4628-878a-6f4cce86cd22 req-b65f2bf7-4e62-448c-ab63-6d4313487f48 service nova] Acquiring lock "refresh_cache-7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2157.092125] env[62684]: INFO nova.compute.manager [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Shelve offloading [ 2157.093932] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2157.094222] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7749ec07-bcf2-4ce2-b938-289b8dba6f4b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.101843] env[62684]: DEBUG oslo_vmware.api [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2157.101843] env[62684]: value = "task-2053422" [ 2157.101843] env[62684]: _type = "Task" [ 2157.101843] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2157.109954] env[62684]: DEBUG oslo_vmware.api [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053422, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2157.198099] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1b42dd61-ffd5-49fa-89ce-b58bf26463a0 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "aebbc2cc-8973-4907-9ec8-085027fd7ca3" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2157.198416] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1b42dd61-ffd5-49fa-89ce-b58bf26463a0 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "aebbc2cc-8973-4907-9ec8-085027fd7ca3" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2157.198668] env[62684]: INFO nova.compute.manager [None req-1b42dd61-ffd5-49fa-89ce-b58bf26463a0 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Attaching volume e8dc2ee3-f51f-4210-9390-c40af41c56da to /dev/sdb [ 2157.224206] env[62684]: DEBUG nova.scheduler.client.report [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2157.237343] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14127ebf-38e2-45f3-aa5e-18e52ba79b9b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.245957] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3513f1-0a6e-461b-a5bd-422df2d79f8d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.262684] env[62684]: DEBUG nova.virt.block_device [None req-1b42dd61-ffd5-49fa-89ce-b58bf26463a0 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Updating existing volume attachment record: 940b98f5-3496-46ff-aa57-9fd92a94aa87 {{(pid=62684) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2157.304940] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2157.307034] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 
tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "refresh_cache-0a8d7c48-cf90-4baf-a900-38fbd62869a6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2157.307034] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired lock "refresh_cache-0a8d7c48-cf90-4baf-a900-38fbd62869a6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2157.307034] env[62684]: DEBUG nova.network.neutron [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2157.307665] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2d9126cf-c305-4a2e-b9a1-b1d074a125a9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.315982] env[62684]: DEBUG oslo_vmware.api [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2157.315982] env[62684]: value = "task-2053423" [ 2157.315982] env[62684]: _type = "Task" [ 2157.315982] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2157.326786] env[62684]: DEBUG oslo_vmware.api [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053423, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2157.387216] env[62684]: DEBUG nova.compute.manager [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2157.418657] env[62684]: DEBUG nova.virt.hardware [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2157.418939] env[62684]: DEBUG nova.virt.hardware [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2157.419120] env[62684]: DEBUG nova.virt.hardware [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2157.419315] env[62684]: DEBUG nova.virt.hardware [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2157.419562] env[62684]: DEBUG nova.virt.hardware [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2157.419757] env[62684]: DEBUG nova.virt.hardware [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2157.419979] env[62684]: DEBUG nova.virt.hardware [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2157.420163] env[62684]: DEBUG nova.virt.hardware [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2157.420341] env[62684]: DEBUG 
nova.virt.hardware [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2157.420515] env[62684]: DEBUG nova.virt.hardware [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2157.420749] env[62684]: DEBUG nova.virt.hardware [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2157.421612] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-194e3e76-29cc-49d5-a4b3-f0c071f0ae05 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.429431] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a2ca8eb-dcdd-4184-abb9-f4cd71535d5d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.612386] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] VM already powered off {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2157.612761] env[62684]: DEBUG nova.compute.manager [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2157.613406] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f2bc51-50c6-4d38-8fe6-632da4a21ac0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.619249] env[62684]: DEBUG oslo_concurrency.lockutils [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "refresh_cache-daf1486b-d5c2-4341-8a27-36eeeb08cd26" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2157.619382] env[62684]: DEBUG oslo_concurrency.lockutils [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquired lock "refresh_cache-daf1486b-d5c2-4341-8a27-36eeeb08cd26" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2157.619562] env[62684]: DEBUG nova.network.neutron [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: 
daf1486b-d5c2-4341-8a27-36eeeb08cd26] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2157.667473] env[62684]: DEBUG nova.network.neutron [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2157.729029] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.369s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2157.729631] env[62684]: DEBUG nova.compute.manager [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2157.732382] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.458s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2157.732715] env[62684]: DEBUG nova.objects.instance [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lazy-loading 'resources' on Instance uuid 9418b42d-9fff-41fd-92d1-a832017fc9c3 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2157.786284] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c2ca817e-037d-4b4b-80db-8d8f9666431e tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "2baabe7a-ed33-4cef-9acc-a7b804610b0a" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2157.786403] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c2ca817e-037d-4b4b-80db-8d8f9666431e tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "2baabe7a-ed33-4cef-9acc-a7b804610b0a" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2157.824026] env[62684]: DEBUG nova.network.neutron [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Updating instance_info_cache with network_info: [{"id": "4be5b16b-2fb8-4e61-a763-f521de30fe81", "address": "fa:16:3e:25:39:fa", "network": {"id": "eb5671f2-a648-452e-a4c0-e81eb932b49c", "bridge": "br-int", "label": 
"tempest-ImagesOneServerNegativeTestJSON-524881696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "359bcaa2eeb64bcbb6602062777b852e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "001929c7-0dc4-4b73-a9f1-d672f8377985", "external-id": "nsx-vlan-transportzone-230", "segmentation_id": 230, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4be5b16b-2f", "ovs_interfaceid": "4be5b16b-2fb8-4e61-a763-f521de30fe81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2157.829886] env[62684]: DEBUG oslo_vmware.api [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053423, 'name': PowerOffVM_Task, 'duration_secs': 0.301055} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2157.830171] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2157.830964] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f985732b-62f5-4cbd-8d57-2d3a6abd9792 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.853920] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8d64a63-9e44-4e2c-ae68-f02dbe8cd193 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.865377] env[62684]: DEBUG oslo_concurrency.lockutils [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "99a9653c-7221-4495-be5f-5441dc8da0f4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2157.865485] env[62684]: DEBUG oslo_concurrency.lockutils [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "99a9653c-7221-4495-be5f-5441dc8da0f4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2157.890097] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 
tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2157.890404] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7dd0f7ce-ae10-40ac-a666-9eaecc5131b6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.898105] env[62684]: DEBUG oslo_vmware.api [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2157.898105] env[62684]: value = "task-2053427" [ 2157.898105] env[62684]: _type = "Task" [ 2157.898105] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2157.906205] env[62684]: DEBUG oslo_vmware.api [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053427, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2158.024346] env[62684]: DEBUG nova.network.neutron [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Updating instance_info_cache with network_info: [{"id": "f121aad3-8e11-4583-8919-c502deebb5e2", "address": "fa:16:3e:08:50:be", "network": {"id": "7678b347-6a54-4b84-9a4d-b566bbeb1ea4", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-51664912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d88ac878d44480b3b54b24ab87efa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf121aad3-8e", "ovs_interfaceid": "f121aad3-8e11-4583-8919-c502deebb5e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2158.235126] env[62684]: DEBUG nova.compute.utils [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2158.236489] env[62684]: DEBUG nova.compute.manager [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2158.236673] env[62684]: DEBUG nova.network.neutron [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2158.290248] env[62684]: DEBUG nova.compute.utils [None req-c2ca817e-037d-4b4b-80db-8d8f9666431e tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2158.319943] env[62684]: DEBUG nova.policy [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b6ff6082c4844de797e8aee5e8bd43fd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '540d70f4b6274c38a5e79c00e389d8fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2158.326811] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Releasing lock "refresh_cache-7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2158.327215] env[62684]: DEBUG nova.compute.manager [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Instance network_info: |[{"id": "4be5b16b-2fb8-4e61-a763-f521de30fe81", "address": "fa:16:3e:25:39:fa", "network": {"id": "eb5671f2-a648-452e-a4c0-e81eb932b49c", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-524881696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "359bcaa2eeb64bcbb6602062777b852e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "001929c7-0dc4-4b73-a9f1-d672f8377985", "external-id": "nsx-vlan-transportzone-230", "segmentation_id": 230, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4be5b16b-2f", "ovs_interfaceid": "4be5b16b-2fb8-4e61-a763-f521de30fe81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2158.327687] env[62684]: DEBUG oslo_concurrency.lockutils 
[req-837fc913-a615-4628-878a-6f4cce86cd22 req-b65f2bf7-4e62-448c-ab63-6d4313487f48 service nova] Acquired lock "refresh_cache-7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2158.327925] env[62684]: DEBUG nova.network.neutron [req-837fc913-a615-4628-878a-6f4cce86cd22 req-b65f2bf7-4e62-448c-ab63-6d4313487f48 service nova] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Refreshing network info cache for port 4be5b16b-2fb8-4e61-a763-f521de30fe81 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2158.329303] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:25:39:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '001929c7-0dc4-4b73-a9f1-d672f8377985', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4be5b16b-2fb8-4e61-a763-f521de30fe81', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2158.336947] env[62684]: DEBUG oslo.service.loopingcall [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2158.340067] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2158.340380] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-54740c87-59b1-4f50-905e-c18fcf7007ca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.364233] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2158.364233] env[62684]: value = "task-2053428" [ 2158.364233] env[62684]: _type = "Task" [ 2158.364233] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2158.368080] env[62684]: DEBUG nova.compute.manager [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2158.380228] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053428, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2158.389785] env[62684]: DEBUG nova.network.neutron [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Updating instance_info_cache with network_info: [{"id": "da310d7c-cd12-49ca-8014-efa9469aef45", "address": "fa:16:3e:5a:6f:ea", "network": {"id": "64494ea7-f6d9-430c-8ac7-e876e763004b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2056829508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.164", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e57b232a7e7647c7a3b2bca3c096feb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda310d7c-cd", "ovs_interfaceid": "da310d7c-cd12-49ca-8014-efa9469aef45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2158.414296] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] VM already powered off {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2158.414620] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2158.414850] env[62684]: DEBUG oslo_concurrency.lockutils [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2158.415051] env[62684]: DEBUG oslo_concurrency.lockutils [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2158.415276] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 
tempest-ServerRescueTestJSON-761077259-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2158.416686] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fa64fc14-3b3d-4fcb-bc3f-8ef5b121027f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.428898] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2158.429136] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2158.429936] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e939eeb2-9ebb-41ec-9440-9d9b77a59053 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.435497] env[62684]: DEBUG oslo_vmware.api [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2158.435497] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52edec30-4b9d-21c8-58d8-0ed3e3213a0b" [ 2158.435497] env[62684]: _type = "Task" [ 2158.435497] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2158.443605] env[62684]: DEBUG oslo_vmware.api [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52edec30-4b9d-21c8-58d8-0ed3e3213a0b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2158.527265] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Releasing lock "refresh_cache-0a8d7c48-cf90-4baf-a900-38fbd62869a6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2158.542031] env[62684]: DEBUG nova.network.neutron [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Successfully updated port: efda8f4d-97b4-44f8-b30b-d26145e98e58 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2158.572085] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c3f969e-6dd4-4ec1-ae31-d3ef568285b8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.580775] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-645c3fee-228d-41ce-a80f-378789fcc4d8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.610170] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30aa2490-723b-4a53-990e-bab244802f8a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.617248] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdaaa102-654d-4fd4-ade7-a35e6d8b662c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.631259] env[62684]: DEBUG nova.compute.provider_tree [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2158.711997] env[62684]: DEBUG nova.network.neutron [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Successfully created port: 4424c4bf-2ffd-4b4a-89f5-b6e9c9faa84b {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2158.739875] env[62684]: DEBUG nova.compute.manager [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2158.793021] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c2ca817e-037d-4b4b-80db-8d8f9666431e tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "2baabe7a-ed33-4cef-9acc-a7b804610b0a" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2158.878759] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053428, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2158.892641] env[62684]: DEBUG oslo_concurrency.lockutils [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2158.893175] env[62684]: DEBUG oslo_concurrency.lockutils [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Releasing lock "refresh_cache-daf1486b-d5c2-4341-8a27-36eeeb08cd26" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2158.946407] env[62684]: DEBUG oslo_vmware.api [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52edec30-4b9d-21c8-58d8-0ed3e3213a0b, 'name': SearchDatastore_Task, 'duration_secs': 0.010968} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2158.947261] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-efd10482-ce86-4254-a1db-dbcdf74be805 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.952960] env[62684]: DEBUG oslo_vmware.api [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2158.952960] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e1ba86-99b5-fedb-91c0-50ca565cd63e" [ 2158.952960] env[62684]: _type = "Task" [ 2158.952960] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2158.962713] env[62684]: DEBUG oslo_vmware.api [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e1ba86-99b5-fedb-91c0-50ca565cd63e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2159.048167] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "refresh_cache-d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2159.048545] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "refresh_cache-d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2159.048545] env[62684]: DEBUG nova.network.neutron [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2159.052800] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6f43002-d91d-4944-8219-373d50bc4b64 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.074167] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c678cc21-84b3-4efb-b1be-5f4080ed2c22 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.081958] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Updating instance '0a8d7c48-cf90-4baf-a900-38fbd62869a6' progress to 83 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2159.134601] env[62684]: DEBUG nova.scheduler.client.report [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2159.260773] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2159.261736] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-989ea5a0-cb8a-4169-8174-0c1a266eb26b {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.269628] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2159.269870] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3a9709b0-77b6-4b5a-873a-5487c0d11680 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.305821] env[62684]: DEBUG nova.network.neutron [req-837fc913-a615-4628-878a-6f4cce86cd22 req-b65f2bf7-4e62-448c-ab63-6d4313487f48 service nova] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Updated VIF entry in instance network info cache for port 4be5b16b-2fb8-4e61-a763-f521de30fe81. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2159.306215] env[62684]: DEBUG nova.network.neutron [req-837fc913-a615-4628-878a-6f4cce86cd22 req-b65f2bf7-4e62-448c-ab63-6d4313487f48 service nova] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Updating instance_info_cache with network_info: [{"id": "4be5b16b-2fb8-4e61-a763-f521de30fe81", "address": "fa:16:3e:25:39:fa", "network": {"id": "eb5671f2-a648-452e-a4c0-e81eb932b49c", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-524881696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "359bcaa2eeb64bcbb6602062777b852e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "001929c7-0dc4-4b73-a9f1-d672f8377985", "external-id": "nsx-vlan-transportzone-230", "segmentation_id": 230, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4be5b16b-2f", "ovs_interfaceid": "4be5b16b-2fb8-4e61-a763-f521de30fe81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2159.371478] env[62684]: DEBUG nova.compute.manager [req-b3a52419-68ba-4308-b9fb-9ea180327973 req-f7de1613-b3dd-4687-b399-37c5ffbaa3c0 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Received event network-vif-plugged-efda8f4d-97b4-44f8-b30b-d26145e98e58 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2159.371708] env[62684]: DEBUG oslo_concurrency.lockutils [req-b3a52419-68ba-4308-b9fb-9ea180327973 req-f7de1613-b3dd-4687-b399-37c5ffbaa3c0 service nova] Acquiring lock "d7f09d0e-f7b6-415e-8d82-47eba1153aa1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2159.371926] env[62684]: DEBUG oslo_concurrency.lockutils [req-b3a52419-68ba-4308-b9fb-9ea180327973 req-f7de1613-b3dd-4687-b399-37c5ffbaa3c0 service nova] Lock 
"d7f09d0e-f7b6-415e-8d82-47eba1153aa1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2159.372115] env[62684]: DEBUG oslo_concurrency.lockutils [req-b3a52419-68ba-4308-b9fb-9ea180327973 req-f7de1613-b3dd-4687-b399-37c5ffbaa3c0 service nova] Lock "d7f09d0e-f7b6-415e-8d82-47eba1153aa1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2159.372288] env[62684]: DEBUG nova.compute.manager [req-b3a52419-68ba-4308-b9fb-9ea180327973 req-f7de1613-b3dd-4687-b399-37c5ffbaa3c0 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] No waiting events found dispatching network-vif-plugged-efda8f4d-97b4-44f8-b30b-d26145e98e58 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2159.372456] env[62684]: WARNING nova.compute.manager [req-b3a52419-68ba-4308-b9fb-9ea180327973 req-f7de1613-b3dd-4687-b399-37c5ffbaa3c0 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Received unexpected event network-vif-plugged-efda8f4d-97b4-44f8-b30b-d26145e98e58 for instance with vm_state building and task_state spawning. [ 2159.372697] env[62684]: DEBUG nova.compute.manager [req-b3a52419-68ba-4308-b9fb-9ea180327973 req-f7de1613-b3dd-4687-b399-37c5ffbaa3c0 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Received event network-changed-efda8f4d-97b4-44f8-b30b-d26145e98e58 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2159.372880] env[62684]: DEBUG nova.compute.manager [req-b3a52419-68ba-4308-b9fb-9ea180327973 req-f7de1613-b3dd-4687-b399-37c5ffbaa3c0 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Refreshing instance network info cache due to event network-changed-efda8f4d-97b4-44f8-b30b-d26145e98e58. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2159.373067] env[62684]: DEBUG oslo_concurrency.lockutils [req-b3a52419-68ba-4308-b9fb-9ea180327973 req-f7de1613-b3dd-4687-b399-37c5ffbaa3c0 service nova] Acquiring lock "refresh_cache-d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2159.381250] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053428, 'name': CreateVM_Task, 'duration_secs': 0.735993} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2159.381402] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2159.382031] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2159.382202] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2159.382511] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2159.382965] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc5d5fcb-c292-4736-9a81-65dcae989f64 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.387810] env[62684]: DEBUG oslo_vmware.api [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2159.387810] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52915074-f544-a4f2-3469-ead3793de035" [ 2159.387810] env[62684]: _type = "Task" [ 2159.387810] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2159.395962] env[62684]: DEBUG oslo_vmware.api [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52915074-f544-a4f2-3469-ead3793de035, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2159.419831] env[62684]: DEBUG nova.compute.manager [req-8667dc50-824e-4a94-9951-7b486d227f9d req-cb2a5b7a-76ef-48ce-9da9-7f806b584133 service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Received event network-vif-unplugged-da310d7c-cd12-49ca-8014-efa9469aef45 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2159.420081] env[62684]: DEBUG oslo_concurrency.lockutils [req-8667dc50-824e-4a94-9951-7b486d227f9d req-cb2a5b7a-76ef-48ce-9da9-7f806b584133 service nova] Acquiring lock "daf1486b-d5c2-4341-8a27-36eeeb08cd26-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2159.420302] env[62684]: DEBUG oslo_concurrency.lockutils [req-8667dc50-824e-4a94-9951-7b486d227f9d req-cb2a5b7a-76ef-48ce-9da9-7f806b584133 service nova] Lock "daf1486b-d5c2-4341-8a27-36eeeb08cd26-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2159.420729] env[62684]: DEBUG oslo_concurrency.lockutils [req-8667dc50-824e-4a94-9951-7b486d227f9d req-cb2a5b7a-76ef-48ce-9da9-7f806b584133 service nova] Lock "daf1486b-d5c2-4341-8a27-36eeeb08cd26-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2159.420884] env[62684]: DEBUG nova.compute.manager [req-8667dc50-824e-4a94-9951-7b486d227f9d req-cb2a5b7a-76ef-48ce-9da9-7f806b584133 service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] No waiting events found dispatching network-vif-unplugged-da310d7c-cd12-49ca-8014-efa9469aef45 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2159.421075] env[62684]: WARNING nova.compute.manager [req-8667dc50-824e-4a94-9951-7b486d227f9d req-cb2a5b7a-76ef-48ce-9da9-7f806b584133 service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Received unexpected event network-vif-unplugged-da310d7c-cd12-49ca-8014-efa9469aef45 for instance with vm_state shelved and task_state shelving_offloading. 
[ 2159.422780] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2159.422980] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2159.423180] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Deleting the datastore file [datastore2] daf1486b-d5c2-4341-8a27-36eeeb08cd26 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2159.423433] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6b4cb38b-76af-4927-bb90-671d9e3aa1df {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.431025] env[62684]: DEBUG oslo_vmware.api [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2159.431025] env[62684]: value = "task-2053430" [ 2159.431025] env[62684]: _type = "Task" [ 2159.431025] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2159.441455] env[62684]: DEBUG oslo_vmware.api [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053430, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2159.462804] env[62684]: DEBUG oslo_vmware.api [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e1ba86-99b5-fedb-91c0-50ca565cd63e, 'name': SearchDatastore_Task, 'duration_secs': 0.008921} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2159.463097] env[62684]: DEBUG oslo_concurrency.lockutils [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2159.463369] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd/3931321c-cb4c-4b87-8d3a-50e05ea01db2-rescue.vmdk. {{(pid=62684) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 2159.463632] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d5914a7b-0609-48e7-a593-940c5764b9d6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.469836] env[62684]: DEBUG oslo_vmware.api [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2159.469836] env[62684]: value = "task-2053431" [ 2159.469836] env[62684]: _type = "Task" [ 2159.469836] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2159.477704] env[62684]: DEBUG oslo_vmware.api [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053431, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2159.588844] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2159.589840] env[62684]: DEBUG nova.network.neutron [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2159.592261] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-722072e0-38a8-4578-8167-59ccb6e535ce {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.598552] env[62684]: DEBUG oslo_vmware.api [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2159.598552] env[62684]: value = "task-2053433" [ 2159.598552] env[62684]: _type = "Task" [ 2159.598552] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2159.606592] env[62684]: DEBUG oslo_vmware.api [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053433, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2159.641978] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.909s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2159.644207] env[62684]: DEBUG oslo_concurrency.lockutils [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.752s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2159.645745] env[62684]: INFO nova.compute.claims [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2159.666126] env[62684]: INFO nova.scheduler.client.report [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Deleted allocations for instance 9418b42d-9fff-41fd-92d1-a832017fc9c3 [ 2159.753020] env[62684]: DEBUG nova.compute.manager [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2159.756595] env[62684]: DEBUG nova.network.neutron [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Updating instance_info_cache with network_info: [{"id": "efda8f4d-97b4-44f8-b30b-d26145e98e58", "address": "fa:16:3e:be:97:33", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefda8f4d-97", "ovs_interfaceid": "efda8f4d-97b4-44f8-b30b-d26145e98e58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2159.781423] env[62684]: DEBUG nova.virt.hardware [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2159.781688] env[62684]: DEBUG nova.virt.hardware [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2159.781851] env[62684]: DEBUG nova.virt.hardware [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2159.782352] env[62684]: DEBUG nova.virt.hardware [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Flavor pref 0:0:0 
{{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2159.782352] env[62684]: DEBUG nova.virt.hardware [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2159.782352] env[62684]: DEBUG nova.virt.hardware [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2159.782558] env[62684]: DEBUG nova.virt.hardware [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2159.782724] env[62684]: DEBUG nova.virt.hardware [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2159.783055] env[62684]: DEBUG nova.virt.hardware [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2159.783130] env[62684]: DEBUG nova.virt.hardware [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2159.783323] env[62684]: DEBUG nova.virt.hardware [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2159.784524] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-282db6db-8de9-4b56-9a85-db6687b5b989 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.793803] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f92cfbde-b405-467c-9c3d-a69a1b8c64fd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.811926] env[62684]: DEBUG oslo_concurrency.lockutils [req-837fc913-a615-4628-878a-6f4cce86cd22 req-b65f2bf7-4e62-448c-ab63-6d4313487f48 service nova] Releasing lock "refresh_cache-7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2159.852606] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c2ca817e-037d-4b4b-80db-8d8f9666431e tempest-ServerRescueNegativeTestJSON-1029344211 
tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "2baabe7a-ed33-4cef-9acc-a7b804610b0a" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2159.852957] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c2ca817e-037d-4b4b-80db-8d8f9666431e tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "2baabe7a-ed33-4cef-9acc-a7b804610b0a" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2159.853245] env[62684]: INFO nova.compute.manager [None req-c2ca817e-037d-4b4b-80db-8d8f9666431e tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Attaching volume 004e22f3-56ef-4e39-994c-bd13ad8bbc11 to /dev/sdb [ 2159.899360] env[62684]: DEBUG oslo_vmware.api [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52915074-f544-a4f2-3469-ead3793de035, 'name': SearchDatastore_Task, 'duration_secs': 0.011743} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2159.900709] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2159.900965] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2159.901226] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2159.901381] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2159.901570] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 
tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2159.902390] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-406d1567-9651-4cc8-b435-0d9b9a6e8b1f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.905045] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a642e08-081c-4f30-afb2-172251757702 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.912985] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40558090-2395-4f1d-b57e-03d1ecbae461 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.918605] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2159.918828] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2159.919628] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44027d5b-a3dd-447b-9f87-c4766b862502 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.926671] env[62684]: DEBUG nova.virt.block_device [None req-c2ca817e-037d-4b4b-80db-8d8f9666431e tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Updating existing volume attachment record: 979f57a5-43fd-404e-810d-8ed582fa8c00 {{(pid=62684) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2159.930601] env[62684]: DEBUG oslo_vmware.api [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2159.930601] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52126717-bfe6-f107-9290-18794c431990" [ 2159.930601] env[62684]: _type = "Task" [ 2159.930601] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2159.948350] env[62684]: DEBUG oslo_vmware.api [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053430, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155266} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2159.948432] env[62684]: DEBUG oslo_vmware.api [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52126717-bfe6-f107-9290-18794c431990, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2159.948691] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2159.948906] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2159.949108] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2159.975315] env[62684]: INFO nova.scheduler.client.report [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Deleted allocations for instance daf1486b-d5c2-4341-8a27-36eeeb08cd26 [ 2159.984290] env[62684]: DEBUG oslo_vmware.api [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053431, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.491897} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2159.984583] env[62684]: INFO nova.virt.vmwareapi.ds_util [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd/3931321c-cb4c-4b87-8d3a-50e05ea01db2-rescue.vmdk. 
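The CopyVirtualDisk_Task entries above follow the usual oslo.vmware pattern: invoke a vCenter task method, get back a task reference, then poll it until it finishes (the "progress is N%" DEBUG lines are that polling loop). Below is a minimal sketch of that pattern under the assumption of a reachable vCenter; the host, credentials, datastore paths and the datacenter lookup are placeholders for illustration, not values or code taken from this log.

from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Placeholder connection details; api_retry_count and task_poll_interval
# control the retry behaviour and the polling interval of wait_for_task.
session = vmware_api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

vim = session.vim
disk_mgr = vim.service_content.virtualDiskManager

# Grab the first Datacenter managed-object reference (placeholder lookup).
dc_ref = vim_util.get_objects(vim, 'Datacenter', 1).objects[0].obj

# Start the server-side copy; vCenter returns a task reference immediately.
task = session.invoke_api(
    vim, 'CopyVirtualDisk_Task', disk_mgr,
    sourceName='[datastore2] devstack-image-cache_base/<image-id>.vmdk',
    sourceDatacenter=dc_ref,
    destName='[datastore2] <instance-uuid>/<instance-uuid>.vmdk',
    destDatacenter=dc_ref)

# Poll the task until it reaches 'success'; raises if the task errors out.
session.wait_for_task(task)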
[ 2159.985470] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d54d87f-6974-459e-a611-d6b8159c3201 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.010410] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd/3931321c-cb4c-4b87-8d3a-50e05ea01db2-rescue.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2160.011503] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-96cbcca1-d991-4af2-b251-895f57dc2e9f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.030969] env[62684]: DEBUG oslo_vmware.api [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2160.030969] env[62684]: value = "task-2053434" [ 2160.030969] env[62684]: _type = "Task" [ 2160.030969] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2160.039329] env[62684]: DEBUG oslo_vmware.api [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053434, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2160.109201] env[62684]: DEBUG oslo_vmware.api [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053433, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2160.176154] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7a702f72-6005-4ade-9cf7-743e2d6658d4 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "9418b42d-9fff-41fd-92d1-a832017fc9c3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.047s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2160.261751] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "refresh_cache-d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2160.261751] env[62684]: DEBUG nova.compute.manager [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Instance network_info: |[{"id": "efda8f4d-97b4-44f8-b30b-d26145e98e58", "address": "fa:16:3e:be:97:33", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefda8f4d-97", "ovs_interfaceid": "efda8f4d-97b4-44f8-b30b-d26145e98e58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2160.261751] env[62684]: DEBUG oslo_concurrency.lockutils [req-b3a52419-68ba-4308-b9fb-9ea180327973 req-f7de1613-b3dd-4687-b399-37c5ffbaa3c0 service nova] Acquired lock "refresh_cache-d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2160.261751] env[62684]: DEBUG nova.network.neutron [req-b3a52419-68ba-4308-b9fb-9ea180327973 req-f7de1613-b3dd-4687-b399-37c5ffbaa3c0 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Refreshing network info cache for port efda8f4d-97b4-44f8-b30b-d26145e98e58 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2160.262527] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:97:33', 'network_ref': {'type': 'OpaqueNetwork', 
'network-id': 'f880ac2e-d532-4f54-87bb-998a8d1bca78', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'efda8f4d-97b4-44f8-b30b-d26145e98e58', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2160.272922] env[62684]: DEBUG oslo.service.loopingcall [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2160.277556] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2160.278471] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a579058b-f9a7-4b3e-963f-b2702fc873c8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.301661] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2160.301661] env[62684]: value = "task-2053436" [ 2160.301661] env[62684]: _type = "Task" [ 2160.301661] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2160.312714] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053436, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2160.442606] env[62684]: DEBUG oslo_vmware.api [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52126717-bfe6-f107-9290-18794c431990, 'name': SearchDatastore_Task, 'duration_secs': 0.025611} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2160.443422] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81c8e0e0-ea91-406b-9f2d-7c755b7718ee {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.448904] env[62684]: DEBUG oslo_vmware.api [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2160.448904] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523f37f4-20df-d7a9-6b37-572ca02435f1" [ 2160.448904] env[62684]: _type = "Task" [ 2160.448904] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2160.460648] env[62684]: DEBUG oslo_vmware.api [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523f37f4-20df-d7a9-6b37-572ca02435f1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2160.480619] env[62684]: DEBUG oslo_concurrency.lockutils [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2160.542726] env[62684]: DEBUG oslo_vmware.api [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053434, 'name': ReconfigVM_Task, 'duration_secs': 0.38581} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2160.543038] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Reconfigured VM instance instance-0000005b to attach disk [datastore2] 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd/3931321c-cb4c-4b87-8d3a-50e05ea01db2-rescue.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2160.543905] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0720bf9-1df0-4445-8ad8-40fd7224d2aa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.577124] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8202c9e4-7b51-4aaa-b9cf-5484e9ada39e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.593083] env[62684]: DEBUG oslo_vmware.api [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2160.593083] env[62684]: value = "task-2053439" [ 2160.593083] env[62684]: _type = "Task" [ 2160.593083] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2160.601373] env[62684]: DEBUG oslo_vmware.api [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053439, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2160.609526] env[62684]: DEBUG oslo_vmware.api [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053433, 'name': PowerOnVM_Task, 'duration_secs': 0.522919} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2160.609811] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2160.610014] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a9be11e2-df9a-46ad-9de4-da1fb0fb540b tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Updating instance '0a8d7c48-cf90-4baf-a900-38fbd62869a6' progress to 100 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2160.815284] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053436, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2160.830060] env[62684]: DEBUG nova.network.neutron [req-b3a52419-68ba-4308-b9fb-9ea180327973 req-f7de1613-b3dd-4687-b399-37c5ffbaa3c0 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Updated VIF entry in instance network info cache for port efda8f4d-97b4-44f8-b30b-d26145e98e58. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2160.830442] env[62684]: DEBUG nova.network.neutron [req-b3a52419-68ba-4308-b9fb-9ea180327973 req-f7de1613-b3dd-4687-b399-37c5ffbaa3c0 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Updating instance_info_cache with network_info: [{"id": "efda8f4d-97b4-44f8-b30b-d26145e98e58", "address": "fa:16:3e:be:97:33", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefda8f4d-97", "ovs_interfaceid": "efda8f4d-97b4-44f8-b30b-d26145e98e58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2160.842645] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "f4fab142-8066-43c1-abaa-a9f66775114c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2160.842645] env[62684]: DEBUG oslo_concurrency.lockutils [None 
req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "f4fab142-8066-43c1-abaa-a9f66775114c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.004s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2160.905872] env[62684]: DEBUG nova.network.neutron [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Successfully updated port: 4424c4bf-2ffd-4b4a-89f5-b6e9c9faa84b {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2160.939415] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3b6fb6-bb35-440f-ae35-62c5d145757b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.947013] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a79169c-0ebb-4004-8f6b-a7f1b97e24a4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.958968] env[62684]: DEBUG oslo_vmware.api [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523f37f4-20df-d7a9-6b37-572ca02435f1, 'name': SearchDatastore_Task, 'duration_secs': 0.042497} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2160.984198] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2160.984511] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5/7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2160.985360] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f1a456a2-72b1-4bf6-8497-866696400adc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.987831] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-600a525b-2f1e-407d-bd18-7c3497cb588f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.997788] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21ba13b9-2595-4ce2-b35d-fc553a5d96bb {{(pid=62684) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.001606] env[62684]: DEBUG oslo_vmware.api [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2161.001606] env[62684]: value = "task-2053440" [ 2161.001606] env[62684]: _type = "Task" [ 2161.001606] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2161.014078] env[62684]: DEBUG nova.compute.provider_tree [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2161.020377] env[62684]: DEBUG oslo_vmware.api [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053440, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.103050] env[62684]: DEBUG oslo_vmware.api [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053439, 'name': ReconfigVM_Task, 'duration_secs': 0.279138} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2161.103330] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2161.103604] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-599a4fde-7c61-4938-b1f5-e2d80b618106 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.109723] env[62684]: DEBUG oslo_vmware.api [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2161.109723] env[62684]: value = "task-2053441" [ 2161.109723] env[62684]: _type = "Task" [ 2161.109723] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2161.119902] env[62684]: DEBUG oslo_vmware.api [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053441, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.314135] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053436, 'name': CreateVM_Task, 'duration_secs': 0.524301} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2161.314320] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2161.315220] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2161.315436] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2161.315753] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2161.316066] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc6d25a5-30a2-4b1c-9b80-948f8df23a9e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.321289] env[62684]: DEBUG oslo_vmware.api [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2161.321289] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52fc2ca2-6e3f-9888-8720-b87c1290016e" [ 2161.321289] env[62684]: _type = "Task" [ 2161.321289] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2161.329917] env[62684]: DEBUG oslo_vmware.api [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52fc2ca2-6e3f-9888-8720-b87c1290016e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.337755] env[62684]: DEBUG oslo_concurrency.lockutils [req-b3a52419-68ba-4308-b9fb-9ea180327973 req-f7de1613-b3dd-4687-b399-37c5ffbaa3c0 service nova] Releasing lock "refresh_cache-d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2161.346534] env[62684]: DEBUG nova.compute.manager [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2161.409458] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "refresh_cache-584845d2-d146-42bf-8ef5-58532fe24f65" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2161.409726] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquired lock "refresh_cache-584845d2-d146-42bf-8ef5-58532fe24f65" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2161.409836] env[62684]: DEBUG nova.network.neutron [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2161.419453] env[62684]: DEBUG nova.compute.manager [req-db0b073b-4e97-42c8-a43a-4b48f9a3dca3 req-c5c14a89-c939-481c-8146-cac527297eca service nova] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Received event network-vif-plugged-4424c4bf-2ffd-4b4a-89f5-b6e9c9faa84b {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2161.419678] env[62684]: DEBUG oslo_concurrency.lockutils [req-db0b073b-4e97-42c8-a43a-4b48f9a3dca3 req-c5c14a89-c939-481c-8146-cac527297eca service nova] Acquiring lock "584845d2-d146-42bf-8ef5-58532fe24f65-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2161.420023] env[62684]: DEBUG oslo_concurrency.lockutils [req-db0b073b-4e97-42c8-a43a-4b48f9a3dca3 req-c5c14a89-c939-481c-8146-cac527297eca service nova] Lock "584845d2-d146-42bf-8ef5-58532fe24f65-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2161.420195] env[62684]: DEBUG oslo_concurrency.lockutils [req-db0b073b-4e97-42c8-a43a-4b48f9a3dca3 req-c5c14a89-c939-481c-8146-cac527297eca service nova] Lock "584845d2-d146-42bf-8ef5-58532fe24f65-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2161.420408] env[62684]: DEBUG nova.compute.manager [req-db0b073b-4e97-42c8-a43a-4b48f9a3dca3 req-c5c14a89-c939-481c-8146-cac527297eca service nova] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] No waiting events found dispatching network-vif-plugged-4424c4bf-2ffd-4b4a-89f5-b6e9c9faa84b {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2161.420943] env[62684]: WARNING nova.compute.manager [req-db0b073b-4e97-42c8-a43a-4b48f9a3dca3 req-c5c14a89-c939-481c-8146-cac527297eca service nova] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Received unexpected event network-vif-plugged-4424c4bf-2ffd-4b4a-89f5-b6e9c9faa84b for instance with vm_state building and task_state spawning. 
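Both oslo.concurrency locking forms appear throughout this section: the decorator-style lock behind the "Acquiring lock ... by ..." / "acquired by ... :: waited" / '"released" ... :: held' lines (lockutils.py:402/407/421), and the plain context manager used for the refresh_cache-<uuid> and image-cache locks (lockutils.py:310/313/331). A minimal sketch of both forms follows, assuming nothing beyond oslo.concurrency itself; the lock names and function bodies are placeholders, not Nova code.

from oslo_concurrency import lockutils

INSTANCE_UUID = '00000000-0000-0000-0000-000000000000'  # placeholder

# Decorator form: concurrent calls sharing the lock name are serialized, and
# lockutils logs the acquire/waited/held timings seen in this log.
@lockutils.synchronized(INSTANCE_UUID)
def do_attach_volume():
    pass  # the serialized work would go here

# Context-manager form, as used for the per-instance network info cache.
def refresh_cache(instance_uuid):
    with lockutils.lock('refresh_cache-' + instance_uuid):
        pass  # cache refresh would go here

do_attach_volume()
refresh_cache(INSTANCE_UUID)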
[ 2161.420943] env[62684]: DEBUG nova.compute.manager [req-db0b073b-4e97-42c8-a43a-4b48f9a3dca3 req-c5c14a89-c939-481c-8146-cac527297eca service nova] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Received event network-changed-4424c4bf-2ffd-4b4a-89f5-b6e9c9faa84b {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2161.420943] env[62684]: DEBUG nova.compute.manager [req-db0b073b-4e97-42c8-a43a-4b48f9a3dca3 req-c5c14a89-c939-481c-8146-cac527297eca service nova] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Refreshing instance network info cache due to event network-changed-4424c4bf-2ffd-4b4a-89f5-b6e9c9faa84b. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2161.421183] env[62684]: DEBUG oslo_concurrency.lockutils [req-db0b073b-4e97-42c8-a43a-4b48f9a3dca3 req-c5c14a89-c939-481c-8146-cac527297eca service nova] Acquiring lock "refresh_cache-584845d2-d146-42bf-8ef5-58532fe24f65" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2161.447151] env[62684]: DEBUG nova.compute.manager [req-e8a1df9c-666a-4793-98a0-4e2b214df7d3 req-b2d40e20-0252-4200-9d26-24c3bbca336c service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Received event network-changed-da310d7c-cd12-49ca-8014-efa9469aef45 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2161.447554] env[62684]: DEBUG nova.compute.manager [req-e8a1df9c-666a-4793-98a0-4e2b214df7d3 req-b2d40e20-0252-4200-9d26-24c3bbca336c service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Refreshing instance network info cache due to event network-changed-da310d7c-cd12-49ca-8014-efa9469aef45. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2161.447803] env[62684]: DEBUG oslo_concurrency.lockutils [req-e8a1df9c-666a-4793-98a0-4e2b214df7d3 req-b2d40e20-0252-4200-9d26-24c3bbca336c service nova] Acquiring lock "refresh_cache-daf1486b-d5c2-4341-8a27-36eeeb08cd26" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2161.447957] env[62684]: DEBUG oslo_concurrency.lockutils [req-e8a1df9c-666a-4793-98a0-4e2b214df7d3 req-b2d40e20-0252-4200-9d26-24c3bbca336c service nova] Acquired lock "refresh_cache-daf1486b-d5c2-4341-8a27-36eeeb08cd26" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2161.448160] env[62684]: DEBUG nova.network.neutron [req-e8a1df9c-666a-4793-98a0-4e2b214df7d3 req-b2d40e20-0252-4200-9d26-24c3bbca336c service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Refreshing network info cache for port da310d7c-cd12-49ca-8014-efa9469aef45 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2161.513976] env[62684]: DEBUG oslo_vmware.api [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053440, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.517540] env[62684]: DEBUG nova.scheduler.client.report [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2161.620270] env[62684]: DEBUG oslo_vmware.api [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053441, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.811904] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b42dd61-ffd5-49fa-89ce-b58bf26463a0 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Volume attach. Driver type: vmdk {{(pid=62684) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2161.812204] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b42dd61-ffd5-49fa-89ce-b58bf26463a0 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421367', 'volume_id': 'e8dc2ee3-f51f-4210-9390-c40af41c56da', 'name': 'volume-e8dc2ee3-f51f-4210-9390-c40af41c56da', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'aebbc2cc-8973-4907-9ec8-085027fd7ca3', 'attached_at': '', 'detached_at': '', 'volume_id': 'e8dc2ee3-f51f-4210-9390-c40af41c56da', 'serial': 'e8dc2ee3-f51f-4210-9390-c40af41c56da'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2161.813142] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-570b0a34-0326-489a-85ef-9259797e8dd4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.833275] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f77943bd-5282-4af0-95d9-103ae8cd9b4b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.842335] env[62684]: DEBUG oslo_vmware.api [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52fc2ca2-6e3f-9888-8720-b87c1290016e, 'name': SearchDatastore_Task, 'duration_secs': 0.046798} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2161.854768] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2161.855087] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2161.855339] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2161.855497] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2161.858156] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2161.865545] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b42dd61-ffd5-49fa-89ce-b58bf26463a0 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] volume-e8dc2ee3-f51f-4210-9390-c40af41c56da/volume-e8dc2ee3-f51f-4210-9390-c40af41c56da.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2161.867977] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5c7655e0-ccc5-4eae-ae0b-5049ccdc8f7f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.870019] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4192195d-cd52-4864-8aaa-0663236c9c61 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.889789] env[62684]: DEBUG oslo_vmware.api [None req-1b42dd61-ffd5-49fa-89ce-b58bf26463a0 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2161.889789] env[62684]: value = "task-2053442" [ 2161.889789] env[62684]: _type = "Task" [ 2161.889789] 
env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2161.894717] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2161.894903] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2161.895947] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3cbcb434-8e7b-4713-9ab0-eb9ed908a82d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.899039] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2161.902449] env[62684]: DEBUG oslo_vmware.api [None req-1b42dd61-ffd5-49fa-89ce-b58bf26463a0 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053442, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.905935] env[62684]: DEBUG oslo_vmware.api [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2161.905935] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52752aff-3c06-5c01-ad9f-4435d2494433" [ 2161.905935] env[62684]: _type = "Task" [ 2161.905935] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2161.916782] env[62684]: DEBUG oslo_vmware.api [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52752aff-3c06-5c01-ad9f-4435d2494433, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.969729] env[62684]: DEBUG nova.network.neutron [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2162.014802] env[62684]: DEBUG oslo_vmware.api [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053440, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.647812} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2162.015394] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5/7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2162.015758] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2162.016116] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7a4d256a-338c-40a2-9701-5c08e83231ab {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.022877] env[62684]: DEBUG oslo_concurrency.lockutils [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.379s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2162.023587] env[62684]: DEBUG nova.compute.manager [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2162.029236] env[62684]: DEBUG oslo_concurrency.lockutils [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.549s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2162.029792] env[62684]: DEBUG nova.objects.instance [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lazy-loading 'resources' on Instance uuid daf1486b-d5c2-4341-8a27-36eeeb08cd26 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2162.030969] env[62684]: DEBUG oslo_vmware.api [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2162.030969] env[62684]: value = "task-2053443" [ 2162.030969] env[62684]: _type = "Task" [ 2162.030969] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2162.040851] env[62684]: DEBUG oslo_vmware.api [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053443, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2162.135152] env[62684]: DEBUG oslo_vmware.api [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053441, 'name': PowerOnVM_Task} progress is 76%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2162.268698] env[62684]: DEBUG nova.network.neutron [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Updating instance_info_cache with network_info: [{"id": "4424c4bf-2ffd-4b4a-89f5-b6e9c9faa84b", "address": "fa:16:3e:e9:be:8c", "network": {"id": "bf53c8de-5f43-4a15-9911-25340615a63b", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1946277195-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "540d70f4b6274c38a5e79c00e389d8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6db039c-542c-4544-a57d-ddcc6c1e8e45", "external-id": "nsx-vlan-transportzone-810", "segmentation_id": 810, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4424c4bf-2f", "ovs_interfaceid": "4424c4bf-2ffd-4b4a-89f5-b6e9c9faa84b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2162.335409] env[62684]: DEBUG nova.network.neutron [req-e8a1df9c-666a-4793-98a0-4e2b214df7d3 req-b2d40e20-0252-4200-9d26-24c3bbca336c service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Updated VIF entry in instance network info cache for port da310d7c-cd12-49ca-8014-efa9469aef45. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2162.335839] env[62684]: DEBUG nova.network.neutron [req-e8a1df9c-666a-4793-98a0-4e2b214df7d3 req-b2d40e20-0252-4200-9d26-24c3bbca336c service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Updating instance_info_cache with network_info: [{"id": "da310d7c-cd12-49ca-8014-efa9469aef45", "address": "fa:16:3e:5a:6f:ea", "network": {"id": "64494ea7-f6d9-430c-8ac7-e876e763004b", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-2056829508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.164", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e57b232a7e7647c7a3b2bca3c096feb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapda310d7c-cd", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2162.399791] env[62684]: DEBUG oslo_vmware.api [None req-1b42dd61-ffd5-49fa-89ce-b58bf26463a0 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053442, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2162.416805] env[62684]: DEBUG oslo_vmware.api [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52752aff-3c06-5c01-ad9f-4435d2494433, 'name': SearchDatastore_Task, 'duration_secs': 0.022661} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2162.417543] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-956b2ea1-23d7-4433-998a-fbe28d2c1a0b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.422563] env[62684]: DEBUG oslo_vmware.api [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2162.422563] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523c1143-c699-ca10-e5a6-a7f6dc8b7dd7" [ 2162.422563] env[62684]: _type = "Task" [ 2162.422563] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2162.430036] env[62684]: DEBUG oslo_vmware.api [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523c1143-c699-ca10-e5a6-a7f6dc8b7dd7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2162.532405] env[62684]: DEBUG nova.objects.instance [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lazy-loading 'numa_topology' on Instance uuid daf1486b-d5c2-4341-8a27-36eeeb08cd26 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2162.534629] env[62684]: DEBUG nova.compute.utils [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2162.536344] env[62684]: DEBUG nova.compute.manager [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2162.536582] env[62684]: DEBUG nova.network.neutron [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2162.549089] env[62684]: DEBUG oslo_vmware.api [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053443, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06084} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2162.549412] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2162.550319] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb28fdcb-e052-456a-be2a-5ffe4ffaaacb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.574596] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5/7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2162.575263] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-40dc5868-b7df-4b8b-be8f-064011be819e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.590813] env[62684]: DEBUG nova.policy [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2fab3230b61d440e93d1d0a975115405', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '27d04006afc747e19ad87238bfdbaad1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2162.598492] env[62684]: DEBUG oslo_vmware.api [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2162.598492] env[62684]: value = "task-2053445" [ 2162.598492] env[62684]: _type = "Task" [ 2162.598492] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2162.607425] env[62684]: DEBUG oslo_vmware.api [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053445, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2162.622332] env[62684]: DEBUG oslo_vmware.api [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053441, 'name': PowerOnVM_Task, 'duration_secs': 1.135997} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2162.622655] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2162.626575] env[62684]: DEBUG nova.compute.manager [None req-488225bc-a8e7-4254-b6d0-4cfaba69a51a tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2162.627604] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea7bfd8-4f0b-4d55-85e1-f9438682b5af {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.770983] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Releasing lock "refresh_cache-584845d2-d146-42bf-8ef5-58532fe24f65" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2162.771411] env[62684]: DEBUG nova.compute.manager [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Instance network_info: |[{"id": "4424c4bf-2ffd-4b4a-89f5-b6e9c9faa84b", "address": "fa:16:3e:e9:be:8c", "network": {"id": "bf53c8de-5f43-4a15-9911-25340615a63b", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1946277195-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "540d70f4b6274c38a5e79c00e389d8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6db039c-542c-4544-a57d-ddcc6c1e8e45", "external-id": "nsx-vlan-transportzone-810", "segmentation_id": 810, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4424c4bf-2f", "ovs_interfaceid": "4424c4bf-2ffd-4b4a-89f5-b6e9c9faa84b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2162.772562] env[62684]: DEBUG oslo_concurrency.lockutils [req-db0b073b-4e97-42c8-a43a-4b48f9a3dca3 req-c5c14a89-c939-481c-8146-cac527297eca service nova] Acquired lock "refresh_cache-584845d2-d146-42bf-8ef5-58532fe24f65" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2162.772562] env[62684]: DEBUG nova.network.neutron [req-db0b073b-4e97-42c8-a43a-4b48f9a3dca3 req-c5c14a89-c939-481c-8146-cac527297eca service nova] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Refreshing network info cache for port 
4424c4bf-2ffd-4b4a-89f5-b6e9c9faa84b {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2162.773335] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:be:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e6db039c-542c-4544-a57d-ddcc6c1e8e45', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4424c4bf-2ffd-4b4a-89f5-b6e9c9faa84b', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2162.781274] env[62684]: DEBUG oslo.service.loopingcall [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2162.781674] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2162.782657] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-911d2872-3df1-4676-84ca-3ace04dd6170 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.804292] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2162.804292] env[62684]: value = "task-2053446" [ 2162.804292] env[62684]: _type = "Task" [ 2162.804292] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2162.815742] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053446, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2162.838468] env[62684]: DEBUG oslo_concurrency.lockutils [req-e8a1df9c-666a-4793-98a0-4e2b214df7d3 req-b2d40e20-0252-4200-9d26-24c3bbca336c service nova] Releasing lock "refresh_cache-daf1486b-d5c2-4341-8a27-36eeeb08cd26" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2162.901504] env[62684]: DEBUG oslo_vmware.api [None req-1b42dd61-ffd5-49fa-89ce-b58bf26463a0 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053442, 'name': ReconfigVM_Task, 'duration_secs': 0.776882} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2162.901818] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b42dd61-ffd5-49fa-89ce-b58bf26463a0 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Reconfigured VM instance instance-0000005a to attach disk [datastore2] volume-e8dc2ee3-f51f-4210-9390-c40af41c56da/volume-e8dc2ee3-f51f-4210-9390-c40af41c56da.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2162.907484] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35aa9289-1f50-4cd1-aab1-2a26e3c32881 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.927863] env[62684]: DEBUG oslo_vmware.api [None req-1b42dd61-ffd5-49fa-89ce-b58bf26463a0 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2162.927863] env[62684]: value = "task-2053447" [ 2162.927863] env[62684]: _type = "Task" [ 2162.927863] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2162.937294] env[62684]: DEBUG oslo_vmware.api [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523c1143-c699-ca10-e5a6-a7f6dc8b7dd7, 'name': SearchDatastore_Task, 'duration_secs': 0.01979} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2162.938101] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2162.938515] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] d7f09d0e-f7b6-415e-8d82-47eba1153aa1/d7f09d0e-f7b6-415e-8d82-47eba1153aa1.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2162.938884] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d2fdd0e6-f933-4d61-9968-e8d6d3ca9ce0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.944402] env[62684]: DEBUG oslo_vmware.api [None req-1b42dd61-ffd5-49fa-89ce-b58bf26463a0 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053447, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2162.949219] env[62684]: DEBUG oslo_vmware.api [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2162.949219] env[62684]: value = "task-2053448" [ 2162.949219] env[62684]: _type = "Task" [ 2162.949219] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2162.957634] env[62684]: DEBUG oslo_vmware.api [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053448, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2163.038289] env[62684]: DEBUG nova.objects.base [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62684) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2163.043632] env[62684]: DEBUG nova.compute.manager [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2163.104321] env[62684]: DEBUG nova.network.neutron [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Successfully created port: fe223d4f-0585-454f-b724-0cdff1d2ceea {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2163.114823] env[62684]: DEBUG oslo_vmware.api [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053445, 'name': ReconfigVM_Task, 'duration_secs': 0.449543} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2163.115070] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Reconfigured VM instance instance-0000005c to attach disk [datastore2] 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5/7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2163.115705] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4f2408c7-6894-4760-beaa-5ef4b1be19f5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.128742] env[62684]: DEBUG oslo_vmware.api [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2163.128742] env[62684]: value = "task-2053449" [ 2163.128742] env[62684]: _type = "Task" [ 2163.128742] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2163.142175] env[62684]: DEBUG oslo_vmware.api [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053449, 'name': Rename_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2163.323626] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053446, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2163.392153] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5740e291-37b8-4e08-80e9-0790f0d5dcc7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.400542] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-851c1cc0-cc3b-42b6-8e18-3828f241e22c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.442618] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a9bb842-d5c2-4518-9e6c-73e3f38776ab {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.460929] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d145d6b-f2c5-4efd-ab6c-831bb0f944e1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.466476] env[62684]: DEBUG oslo_vmware.api [None req-1b42dd61-ffd5-49fa-89ce-b58bf26463a0 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053447, 'name': ReconfigVM_Task, 'duration_secs': 0.190389} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2163.466870] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b42dd61-ffd5-49fa-89ce-b58bf26463a0 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421367', 'volume_id': 'e8dc2ee3-f51f-4210-9390-c40af41c56da', 'name': 'volume-e8dc2ee3-f51f-4210-9390-c40af41c56da', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'aebbc2cc-8973-4907-9ec8-085027fd7ca3', 'attached_at': '', 'detached_at': '', 'volume_id': 'e8dc2ee3-f51f-4210-9390-c40af41c56da', 'serial': 'e8dc2ee3-f51f-4210-9390-c40af41c56da'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2163.474538] env[62684]: DEBUG oslo_vmware.api [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053448, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2163.483565] env[62684]: DEBUG nova.compute.provider_tree [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2163.639709] env[62684]: DEBUG oslo_vmware.api [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053449, 'name': Rename_Task, 'duration_secs': 0.214573} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2163.640060] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2163.640269] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a778b7af-d181-4e8e-af89-d7c1818134f8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.647548] env[62684]: DEBUG oslo_vmware.api [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2163.647548] env[62684]: value = "task-2053450" [ 2163.647548] env[62684]: _type = "Task" [ 2163.647548] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2163.661571] env[62684]: DEBUG oslo_vmware.api [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053450, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2163.815049] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053446, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2163.824699] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b18586d5-1630-414a-b407-d0d4db62dc0f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "0a8d7c48-cf90-4baf-a900-38fbd62869a6" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2163.824942] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b18586d5-1630-414a-b407-d0d4db62dc0f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "0a8d7c48-cf90-4baf-a900-38fbd62869a6" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2163.825152] env[62684]: DEBUG nova.compute.manager [None req-b18586d5-1630-414a-b407-d0d4db62dc0f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Going to confirm migration 3 {{(pid=62684) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 2163.965132] env[62684]: DEBUG oslo_vmware.api [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053448, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2164.010305] env[62684]: ERROR nova.scheduler.client.report [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [req-16cfa2e8-a260-421a-bd72-15f11d0c5300] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-16cfa2e8-a260-421a-bd72-15f11d0c5300"}]} [ 2164.032153] env[62684]: DEBUG nova.scheduler.client.report [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2164.050415] env[62684]: DEBUG nova.scheduler.client.report [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2164.050654] env[62684]: DEBUG nova.compute.provider_tree [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2164.055801] env[62684]: DEBUG nova.compute.manager [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2164.069164] env[62684]: DEBUG nova.scheduler.client.report [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2164.085852] env[62684]: DEBUG nova.network.neutron [req-db0b073b-4e97-42c8-a43a-4b48f9a3dca3 req-c5c14a89-c939-481c-8146-cac527297eca service nova] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Updated VIF entry in instance network info cache for port 4424c4bf-2ffd-4b4a-89f5-b6e9c9faa84b. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2164.086232] env[62684]: DEBUG nova.network.neutron [req-db0b073b-4e97-42c8-a43a-4b48f9a3dca3 req-c5c14a89-c939-481c-8146-cac527297eca service nova] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Updating instance_info_cache with network_info: [{"id": "4424c4bf-2ffd-4b4a-89f5-b6e9c9faa84b", "address": "fa:16:3e:e9:be:8c", "network": {"id": "bf53c8de-5f43-4a15-9911-25340615a63b", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1946277195-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "540d70f4b6274c38a5e79c00e389d8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6db039c-542c-4544-a57d-ddcc6c1e8e45", "external-id": "nsx-vlan-transportzone-810", "segmentation_id": 810, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4424c4bf-2f", "ovs_interfaceid": "4424c4bf-2ffd-4b4a-89f5-b6e9c9faa84b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2164.093931] env[62684]: DEBUG nova.scheduler.client.report [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2164.100444] env[62684]: DEBUG nova.virt.hardware [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2164.100807] env[62684]: DEBUG nova.virt.hardware [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2164.100884] 
env[62684]: DEBUG nova.virt.hardware [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2164.101098] env[62684]: DEBUG nova.virt.hardware [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2164.101258] env[62684]: DEBUG nova.virt.hardware [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2164.101411] env[62684]: DEBUG nova.virt.hardware [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2164.101631] env[62684]: DEBUG nova.virt.hardware [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2164.101797] env[62684]: DEBUG nova.virt.hardware [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2164.101972] env[62684]: DEBUG nova.virt.hardware [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2164.102162] env[62684]: DEBUG nova.virt.hardware [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2164.102423] env[62684]: DEBUG nova.virt.hardware [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2164.103215] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-882da570-c422-4ad9-8a4e-2b642ba55e43 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.114831] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb8e743-c8ae-41c5-841a-011970a41aaf {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.156749] env[62684]: DEBUG oslo_vmware.api [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053450, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2164.163586] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "daf1486b-d5c2-4341-8a27-36eeeb08cd26" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2164.317790] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053446, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2164.333793] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1265cd6d-a7ff-4bed-860c-5f940bbf7f6e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.340938] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dfa36bd-8667-43bc-a42d-5dae38490de8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.371628] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9187ae2-265a-42b8-881d-09cdeb7cc049 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.379071] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ce467e6-79c3-4dc7-8e03-9913d7315cb5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.394132] env[62684]: DEBUG nova.compute.provider_tree [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2164.396147] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b18586d5-1630-414a-b407-d0d4db62dc0f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "refresh_cache-0a8d7c48-cf90-4baf-a900-38fbd62869a6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2164.396322] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b18586d5-1630-414a-b407-d0d4db62dc0f tempest-ServerDiskConfigTestJSON-2081969978 
tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired lock "refresh_cache-0a8d7c48-cf90-4baf-a900-38fbd62869a6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2164.396513] env[62684]: DEBUG nova.network.neutron [None req-b18586d5-1630-414a-b407-d0d4db62dc0f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2164.396712] env[62684]: DEBUG nova.objects.instance [None req-b18586d5-1630-414a-b407-d0d4db62dc0f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lazy-loading 'info_cache' on Instance uuid 0a8d7c48-cf90-4baf-a900-38fbd62869a6 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2164.464965] env[62684]: DEBUG oslo_vmware.api [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053448, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2164.517416] env[62684]: DEBUG nova.objects.instance [None req-1b42dd61-ffd5-49fa-89ce-b58bf26463a0 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lazy-loading 'flavor' on Instance uuid aebbc2cc-8973-4907-9ec8-085027fd7ca3 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2164.540708] env[62684]: INFO nova.compute.manager [None req-f4b6f3a3-8552-4afe-9208-6bf7f909a21d tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Unrescuing [ 2164.540989] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f4b6f3a3-8552-4afe-9208-6bf7f909a21d tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquiring lock "refresh_cache-7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2164.541163] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f4b6f3a3-8552-4afe-9208-6bf7f909a21d tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquired lock "refresh_cache-7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2164.541336] env[62684]: DEBUG nova.network.neutron [None req-f4b6f3a3-8552-4afe-9208-6bf7f909a21d tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2164.588403] env[62684]: DEBUG oslo_concurrency.lockutils [req-db0b073b-4e97-42c8-a43a-4b48f9a3dca3 req-c5c14a89-c939-481c-8146-cac527297eca service nova] Releasing lock "refresh_cache-584845d2-d146-42bf-8ef5-58532fe24f65" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2164.658876] env[62684]: DEBUG oslo_vmware.api [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 
tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053450, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2164.816414] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053446, 'name': CreateVM_Task, 'duration_secs': 1.556039} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2164.816604] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2164.817351] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2164.817517] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2164.817846] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2164.818138] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8c0f7e6-58f6-4e25-8483-e6af1c035953 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.823057] env[62684]: DEBUG oslo_vmware.api [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 2164.823057] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d42f60-593a-3fe8-4c44-b7b741e874ee" [ 2164.823057] env[62684]: _type = "Task" [ 2164.823057] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2164.830952] env[62684]: DEBUG oslo_vmware.api [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d42f60-593a-3fe8-4c44-b7b741e874ee, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2164.928161] env[62684]: DEBUG nova.scheduler.client.report [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 140 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2164.928477] env[62684]: DEBUG nova.compute.provider_tree [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 140 to 141 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2164.928682] env[62684]: DEBUG nova.compute.provider_tree [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2164.968177] env[62684]: DEBUG oslo_vmware.api [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053448, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.656564} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2164.968541] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] d7f09d0e-f7b6-415e-8d82-47eba1153aa1/d7f09d0e-f7b6-415e-8d82-47eba1153aa1.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2164.969422] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2164.969422] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f2d36575-8a79-45e3-becb-93f9b0cbb029 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.975823] env[62684]: DEBUG oslo_vmware.api [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2164.975823] env[62684]: value = "task-2053451" [ 2164.975823] env[62684]: _type = "Task" [ 2164.975823] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2164.980840] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2ca817e-037d-4b4b-80db-8d8f9666431e tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Volume attach. 
Driver type: vmdk {{(pid=62684) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2164.981197] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2ca817e-037d-4b4b-80db-8d8f9666431e tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421371', 'volume_id': '004e22f3-56ef-4e39-994c-bd13ad8bbc11', 'name': 'volume-004e22f3-56ef-4e39-994c-bd13ad8bbc11', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2baabe7a-ed33-4cef-9acc-a7b804610b0a', 'attached_at': '', 'detached_at': '', 'volume_id': '004e22f3-56ef-4e39-994c-bd13ad8bbc11', 'serial': '004e22f3-56ef-4e39-994c-bd13ad8bbc11'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2164.982742] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f6ace2f-5faa-4cf0-bd33-fd94edc90b25 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.992149] env[62684]: DEBUG oslo_vmware.api [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053451, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2165.004599] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070af64e-f822-4e3b-8085-ef895ecb054a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.032207] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2ca817e-037d-4b4b-80db-8d8f9666431e tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Reconfiguring VM instance instance-0000003f to attach disk [datastore2] volume-004e22f3-56ef-4e39-994c-bd13ad8bbc11/volume-004e22f3-56ef-4e39-994c-bd13ad8bbc11.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2165.032881] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1b42dd61-ffd5-49fa-89ce-b58bf26463a0 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "aebbc2cc-8973-4907-9ec8-085027fd7ca3" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.834s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2165.033871] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b45775c4-8f55-43a3-baeb-a20d8806adfe {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.054571] env[62684]: DEBUG oslo_vmware.api [None req-c2ca817e-037d-4b4b-80db-8d8f9666431e tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 2165.054571] env[62684]: value = "task-2053452" [ 2165.054571] env[62684]: _type = "Task" [ 2165.054571] env[62684]: } to 
complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2165.064307] env[62684]: DEBUG oslo_vmware.api [None req-c2ca817e-037d-4b4b-80db-8d8f9666431e tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053452, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2165.158657] env[62684]: DEBUG oslo_vmware.api [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053450, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2165.333967] env[62684]: DEBUG oslo_vmware.api [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d42f60-593a-3fe8-4c44-b7b741e874ee, 'name': SearchDatastore_Task, 'duration_secs': 0.020074} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2165.334304] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2165.334554] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2165.334890] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2165.334985] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2165.335125] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2165.335395] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-901728ca-3c33-42e0-a99d-c930721a546d {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.343655] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2165.343839] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2165.344571] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cf0368d-ee16-4c9c-a27a-ebd5868e6799 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.349917] env[62684]: DEBUG oslo_vmware.api [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 2165.349917] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bbd723-de68-64c4-fd64-d282fca59803" [ 2165.349917] env[62684]: _type = "Task" [ 2165.349917] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2165.357644] env[62684]: DEBUG oslo_vmware.api [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bbd723-de68-64c4-fd64-d282fca59803, 'name': SearchDatastore_Task} progress is 0%. 
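Nearly every record in this section follows the same oslo.vmware pattern: invoke a vSphere method that returns a Task managed object, then poll it until it completes (the "Waiting for the task ... to complete" and "progress is N%" lines). A minimal sketch of that pattern, assuming a reachable vCenter; the host, credentials and vm_ref below are placeholders, not values from this log.

from oslo_vmware import api

# Placeholder endpoint and credentials; api_retry_count/task_poll_interval
# control the retry and polling behaviour behind the _poll_task records.
session = api.VMwareAPISession(
    'vcenter.example.org', 'administrator@vsphere.local', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

def power_off_and_wait(session, vm_ref):
    # invoke_api(module, method, *args): passing session.vim as the module
    # calls the managed-object method through the suds client, like the
    # "Invoking VirtualMachine.PowerOffVM_Task" records above.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # wait_for_task() polls until the task reaches 'success' (or raises),
    # which is what produces the "Task: {...} progress is N%" lines.
    session.wait_for_task(task)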
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2165.389762] env[62684]: DEBUG nova.network.neutron [None req-f4b6f3a3-8552-4afe-9208-6bf7f909a21d tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Updating instance_info_cache with network_info: [{"id": "9755599a-bf6c-415f-b6dc-88d5d3774944", "address": "fa:16:3e:1d:6c:e1", "network": {"id": "8eebb0b3-51e4-44c0-a4b4-b45647332a9e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-398889609-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "43304d5c52344bd9841dbc760a174b4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9755599a-bf", "ovs_interfaceid": "9755599a-bf6c-415f-b6dc-88d5d3774944", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2165.434469] env[62684]: DEBUG oslo_concurrency.lockutils [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.405s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2165.436974] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.538s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2165.438936] env[62684]: INFO nova.compute.claims [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2165.485384] env[62684]: DEBUG oslo_vmware.api [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053451, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066803} completed successfully. 
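The "Updating instance_info_cache with network_info" record above carries the cached VIF list as a JSON-like structure. A small standalone helper (hypothetical, not Nova code) that pulls the commonly needed fields out of one VIF entry, using only keys visible in that record:

def summarize_vif(vif):
    """Return port id, MAC, devname, MTU and fixed IPs from a cached VIF dict."""
    fixed_ips = [ip['address']
                 for subnet in vif['network']['subnets']
                 for ip in subnet['ips']
                 if ip.get('type') == 'fixed']
    return {'port_id': vif['id'],
            'mac': vif['address'],
            'devname': vif.get('devname'),
            'mtu': vif['network']['meta'].get('mtu'),
            'fixed_ips': fixed_ips}

# Values taken from the record above:
vif = {'id': '9755599a-bf6c-415f-b6dc-88d5d3774944',
       'address': 'fa:16:3e:1d:6c:e1',
       'devname': 'tap9755599a-bf',
       'network': {'meta': {'mtu': 8950},
                   'subnets': [{'ips': [{'address': '192.168.128.11',
                                         'type': 'fixed'}]}]}}
print(summarize_vif(vif))
# -> {'port_id': '9755599a-...', 'mac': 'fa:16:3e:1d:6c:e1',
#     'devname': 'tap9755599a-bf', 'mtu': 8950, 'fixed_ips': ['192.168.128.11']}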
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2165.485674] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2165.486498] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97f45a41-3537-4650-96fa-fcf48bbe4905 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.509368] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] d7f09d0e-f7b6-415e-8d82-47eba1153aa1/d7f09d0e-f7b6-415e-8d82-47eba1153aa1.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2165.509765] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ba2ba0e-2fe5-467c-ab15-5300e85fa20b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.530087] env[62684]: DEBUG oslo_vmware.api [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2165.530087] env[62684]: value = "task-2053453" [ 2165.530087] env[62684]: _type = "Task" [ 2165.530087] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2165.540009] env[62684]: DEBUG oslo_vmware.api [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053453, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2165.567447] env[62684]: DEBUG oslo_vmware.api [None req-c2ca817e-037d-4b4b-80db-8d8f9666431e tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053452, 'name': ReconfigVM_Task, 'duration_secs': 0.392578} completed successfully. 
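The "Reconfiguring VM instance ... to attach disk [datastore2] ... with type thin/sparse" records are ReconfigVM_Task calls whose spec adds a VirtualDisk device. Nova builds that spec through the suds client factory in oslo.vmware; the sketch below shows the equivalent spec with pyVmomi purely for readability and covers only the flat/thin-provisioned case, with placeholder controller key, unit number and datastore path.

from pyVmomi import vim

def attach_existing_vmdk(vm, controller_key, unit_number, vmdk_ds_path,
                         thin=True):
    """Attach an already-existing VMDK to a VM via ReconfigVM_Task."""
    backing = vim.vm.device.VirtualDisk.FlatVer2BackingInfo()
    backing.fileName = vmdk_ds_path          # e.g. '[datastore2] dir/disk.vmdk'
    backing.diskMode = 'persistent'
    backing.thinProvisioned = thin           # the "type thin" in the records

    disk = vim.vm.device.VirtualDisk()
    disk.key = -101                          # temporary negative device key
    disk.controllerKey = controller_key
    disk.unitNumber = unit_number
    disk.backing = backing

    dev_spec = vim.vm.device.VirtualDeviceSpec()
    dev_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
    dev_spec.device = disk

    spec = vim.vm.ConfigSpec(deviceChange=[dev_spec])
    return vm.ReconfigVM_Task(spec=spec)     # poll the returned task to completion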
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2165.567566] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2ca817e-037d-4b4b-80db-8d8f9666431e tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Reconfigured VM instance instance-0000003f to attach disk [datastore2] volume-004e22f3-56ef-4e39-994c-bd13ad8bbc11/volume-004e22f3-56ef-4e39-994c-bd13ad8bbc11.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2165.572248] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9fa294c-0a93-4993-91d2-91ea811cfbcb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.589459] env[62684]: DEBUG oslo_vmware.api [None req-c2ca817e-037d-4b4b-80db-8d8f9666431e tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 2165.589459] env[62684]: value = "task-2053454" [ 2165.589459] env[62684]: _type = "Task" [ 2165.589459] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2165.598800] env[62684]: DEBUG oslo_vmware.api [None req-c2ca817e-037d-4b4b-80db-8d8f9666431e tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053454, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2165.602295] env[62684]: DEBUG oslo_concurrency.lockutils [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "aebbc2cc-8973-4907-9ec8-085027fd7ca3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2165.602529] env[62684]: DEBUG oslo_concurrency.lockutils [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "aebbc2cc-8973-4907-9ec8-085027fd7ca3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2165.602745] env[62684]: DEBUG oslo_concurrency.lockutils [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "aebbc2cc-8973-4907-9ec8-085027fd7ca3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2165.602931] env[62684]: DEBUG oslo_concurrency.lockutils [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "aebbc2cc-8973-4907-9ec8-085027fd7ca3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2165.603118] env[62684]: DEBUG oslo_concurrency.lockutils [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "aebbc2cc-8973-4907-9ec8-085027fd7ca3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2165.605500] env[62684]: INFO nova.compute.manager [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Terminating instance [ 2165.607685] env[62684]: DEBUG nova.compute.manager [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2165.607975] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2165.608188] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-db6550fa-3d22-4911-8cd0-c0518063baf5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.614343] env[62684]: DEBUG oslo_vmware.api [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2165.614343] env[62684]: value = "task-2053455" [ 2165.614343] env[62684]: _type = "Task" [ 2165.614343] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2165.625085] env[62684]: DEBUG oslo_vmware.api [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053455, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2165.658572] env[62684]: DEBUG oslo_vmware.api [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053450, 'name': PowerOnVM_Task, 'duration_secs': 1.75689} completed successfully. 
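The lock records in this section come from oslo.concurrency's lockutils: the '... acquired by "nova.compute.manager..." :: waited/held Ns' lines are emitted by the synchronized decorator's wrapper (lockutils.py inner), while the plain 'Acquiring/Acquired/Releasing lock "refresh_cache-..."' lines come from the lock() context manager. A minimal standalone sketch of both forms; the lock names are illustrative.

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # Calls sharing the 'compute_resources' lock name are serialized; the
    # decorator's wrapper logs the "acquired ... :: waited Ns" and
    # '"released" ... :: held Ns' lines seen above.
    pass

# Context-manager form for ad-hoc critical sections:
with lockutils.lock('refresh_cache-2baabe7a-ed33-4cef-9acc-a7b804610b0a'):
    pass  # e.g. rebuilding an instance's network info cache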
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2165.658869] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2165.659100] env[62684]: INFO nova.compute.manager [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Took 10.19 seconds to spawn the instance on the hypervisor. [ 2165.659294] env[62684]: DEBUG nova.compute.manager [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2165.660097] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86540aa8-809c-4979-b0d4-daedac75d997 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.834202] env[62684]: DEBUG nova.network.neutron [None req-b18586d5-1630-414a-b407-d0d4db62dc0f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Updating instance_info_cache with network_info: [{"id": "f121aad3-8e11-4583-8919-c502deebb5e2", "address": "fa:16:3e:08:50:be", "network": {"id": "7678b347-6a54-4b84-9a4d-b566bbeb1ea4", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-51664912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d88ac878d44480b3b54b24ab87efa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf121aad3-8e", "ovs_interfaceid": "f121aad3-8e11-4583-8919-c502deebb5e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2165.862939] env[62684]: DEBUG oslo_vmware.api [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bbd723-de68-64c4-fd64-d282fca59803, 'name': SearchDatastore_Task, 'duration_secs': 0.012551} completed successfully. 
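The many "Invoking PropertyCollector.RetrievePropertiesEx" records are property reads issued through oslo.vmware's vim_util helpers; the "Checking state" step right above is one such read of runtime.powerState. A minimal sketch, assuming an existing VMwareAPISession and VM moref:

from oslo_vmware import vim_util

def get_power_state(session, vm_ref):
    # Routed through invoke_api so session retries apply; under the hood this
    # issues the PropertyCollector.RetrievePropertiesEx call seen in the records.
    return session.invoke_api(vim_util, 'get_object_property',
                              session.vim, vm_ref, 'runtime.powerState')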
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2165.863877] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee7a0ea8-4a8a-4379-9143-5de84a72cdab {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.869270] env[62684]: DEBUG oslo_vmware.api [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 2165.869270] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5258248d-ba1e-f63b-f161-0d28431f5894" [ 2165.869270] env[62684]: _type = "Task" [ 2165.869270] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2165.877602] env[62684]: DEBUG oslo_vmware.api [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5258248d-ba1e-f63b-f161-0d28431f5894, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2165.892818] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f4b6f3a3-8552-4afe-9208-6bf7f909a21d tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Releasing lock "refresh_cache-7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2165.893522] env[62684]: DEBUG nova.objects.instance [None req-f4b6f3a3-8552-4afe-9208-6bf7f909a21d tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Lazy-loading 'flavor' on Instance uuid 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2165.947743] env[62684]: DEBUG oslo_concurrency.lockutils [None req-207ca172-ffd9-46f1-a697-cb4e0f2e0ab6 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "daf1486b-d5c2-4341-8a27-36eeeb08cd26" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 24.533s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2165.948649] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "daf1486b-d5c2-4341-8a27-36eeeb08cd26" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.786s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2165.948795] env[62684]: INFO nova.compute.manager [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Unshelving [ 2166.041069] env[62684]: DEBUG oslo_vmware.api [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053453, 'name': ReconfigVM_Task, 'duration_secs': 0.366714} 
completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2166.041380] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Reconfigured VM instance instance-0000005d to attach disk [datastore2] d7f09d0e-f7b6-415e-8d82-47eba1153aa1/d7f09d0e-f7b6-415e-8d82-47eba1153aa1.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2166.042113] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9e84bd97-05ac-407f-8d62-fa15ad3e0307 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.048647] env[62684]: DEBUG oslo_vmware.api [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2166.048647] env[62684]: value = "task-2053456" [ 2166.048647] env[62684]: _type = "Task" [ 2166.048647] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2166.056965] env[62684]: DEBUG oslo_vmware.api [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053456, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2166.099090] env[62684]: DEBUG oslo_vmware.api [None req-c2ca817e-037d-4b4b-80db-8d8f9666431e tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053454, 'name': ReconfigVM_Task, 'duration_secs': 0.180903} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2166.099421] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2ca817e-037d-4b4b-80db-8d8f9666431e tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421371', 'volume_id': '004e22f3-56ef-4e39-994c-bd13ad8bbc11', 'name': 'volume-004e22f3-56ef-4e39-994c-bd13ad8bbc11', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2baabe7a-ed33-4cef-9acc-a7b804610b0a', 'attached_at': '', 'detached_at': '', 'volume_id': '004e22f3-56ef-4e39-994c-bd13ad8bbc11', 'serial': '004e22f3-56ef-4e39-994c-bd13ad8bbc11'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2166.123554] env[62684]: DEBUG oslo_vmware.api [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053455, 'name': PowerOffVM_Task, 'duration_secs': 0.233726} completed successfully. 
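The _attach_volume_vmdk and "Attached VMDK" records carry Cinder's connection_info dict, and the datastore path in the matching "Reconfiguring VM instance ... to attach disk" record is derived from its data['name'] field. A tiny illustrative helper (not Nova code), using only keys visible in those records:

def volume_vmdk_rel_path(connection_info):
    """Return the '<name>/<name>.vmdk' path fragment seen in the attach records."""
    assert connection_info['driver_volume_type'] == 'vmdk'
    name = connection_info['data']['name']
    return '%s/%s.vmdk' % (name, name)

ci = {'driver_volume_type': 'vmdk',
      'data': {'name': 'volume-004e22f3-56ef-4e39-994c-bd13ad8bbc11',
               'volume_id': '004e22f3-56ef-4e39-994c-bd13ad8bbc11',
               'access_mode': 'rw'}}
print(volume_vmdk_rel_path(ci))
# -> volume-004e22f3-56ef-4e39-994c-bd13ad8bbc11/volume-004e22f3-56ef-4e39-994c-bd13ad8bbc11.vmdk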
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2166.123875] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2166.124143] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Volume detach. Driver type: vmdk {{(pid=62684) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2166.124407] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421367', 'volume_id': 'e8dc2ee3-f51f-4210-9390-c40af41c56da', 'name': 'volume-e8dc2ee3-f51f-4210-9390-c40af41c56da', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'aebbc2cc-8973-4907-9ec8-085027fd7ca3', 'attached_at': '', 'detached_at': '', 'volume_id': 'e8dc2ee3-f51f-4210-9390-c40af41c56da', 'serial': 'e8dc2ee3-f51f-4210-9390-c40af41c56da'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2166.125351] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43c8ad14-0bb8-42fc-bbb1-2bd00ce83f9e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.147834] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abce0e27-b113-492b-940e-40c7c7511f48 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.833716] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b18586d5-1630-414a-b407-d0d4db62dc0f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Releasing lock "refresh_cache-0a8d7c48-cf90-4baf-a900-38fbd62869a6" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2166.834082] env[62684]: DEBUG nova.objects.instance [None req-b18586d5-1630-414a-b407-d0d4db62dc0f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lazy-loading 'migration_context' on Instance uuid 0a8d7c48-cf90-4baf-a900-38fbd62869a6 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2166.836830] env[62684]: DEBUG oslo_concurrency.lockutils [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquiring lock "7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2166.853053] env[62684]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315f9c0f-cdd6-4a86-afc0-8151f70f38e8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.855608] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af78d7e3-979c-4048-b6bc-a0d7e2700066 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.858901] env[62684]: INFO nova.compute.manager [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Took 15.84 seconds to build instance. [ 2166.900741] env[62684]: DEBUG oslo_vmware.api [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053456, 'name': Rename_Task, 'duration_secs': 0.196395} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2166.902127] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4b6f3a3-8552-4afe-9208-6bf7f909a21d tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2166.902535] env[62684]: DEBUG oslo_vmware.api [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5258248d-ba1e-f63b-f161-0d28431f5894, 'name': SearchDatastore_Task, 'duration_secs': 0.015612} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2166.904402] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2166.904643] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2b29dcf2-46e8-4bf7-8358-6810721fb710 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.906715] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-396b86fd-3852-4900-9a6a-1a0169cfb391 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.909107] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2166.909372] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 584845d2-d146-42bf-8ef5-58532fe24f65/584845d2-d146-42bf-8ef5-58532fe24f65.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2166.909997] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9c12cb9b-d0db-4dd0-abc4-926ad1f6bedf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.911608] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-558a1811-98ba-4d44-b319-1e5967881b72 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.930805] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] The volume has not been displaced from its original location: [datastore2] volume-e8dc2ee3-f51f-4210-9390-c40af41c56da/volume-e8dc2ee3-f51f-4210-9390-c40af41c56da.vmdk. No consolidation needed. 
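The "Copying Virtual Disk [datastore2] devstack-image-cache_base/... to [datastore2] <instance-uuid>/..." record is a VirtualDiskManager.CopyVirtualDisk_Task call that clones the cached image into the instance folder. A hedged sketch via oslo.vmware, assuming an existing session and datacenter moref (the wrapper function is illustrative; argument names follow the vSphere API):

def copy_cached_image(session, dc_ref, src_ds_path, dst_ds_path):
    # e.g. src_ds_path = '[datastore2] devstack-image-cache_base/<image>/<image>.vmdk'
    #      dst_ds_path = '[datastore2] <instance-uuid>/<instance-uuid>.vmdk'
    vdm = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', vdm,
                              sourceName=src_ds_path, sourceDatacenter=dc_ref,
                              destName=dst_ds_path, destDatacenter=dc_ref)
    session.wait_for_task(task)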
{{(pid=62684) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2166.936303] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Reconfiguring VM instance instance-0000005a to detach disk 2001 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2166.936786] env[62684]: DEBUG oslo_vmware.api [None req-f4b6f3a3-8552-4afe-9208-6bf7f909a21d tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2166.936786] env[62684]: value = "task-2053457" [ 2166.936786] env[62684]: _type = "Task" [ 2166.936786] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2166.941576] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-36aa74cd-ec6e-4649-bbea-bf7435594e05 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.954333] env[62684]: DEBUG oslo_vmware.api [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 2166.954333] env[62684]: value = "task-2053459" [ 2166.954333] env[62684]: _type = "Task" [ 2166.954333] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2166.954580] env[62684]: DEBUG oslo_vmware.api [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2166.954580] env[62684]: value = "task-2053458" [ 2166.954580] env[62684]: _type = "Task" [ 2166.954580] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2166.969394] env[62684]: DEBUG oslo_vmware.api [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2166.969394] env[62684]: value = "task-2053460" [ 2166.969394] env[62684]: _type = "Task" [ 2166.969394] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2166.981028] env[62684]: DEBUG oslo_vmware.api [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053459, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2166.981283] env[62684]: DEBUG oslo_vmware.api [None req-f4b6f3a3-8552-4afe-9208-6bf7f909a21d tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053457, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2166.981480] env[62684]: DEBUG oslo_vmware.api [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053458, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2166.988768] env[62684]: DEBUG oslo_vmware.api [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053460, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.170516] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97e891c1-cc5e-45a2-8415-1ed9d79812cd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.180870] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d630b04e-b76e-49d1-9857-e972517bba90 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.215660] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17edce47-1f80-4dd6-9752-59ac3fa35b04 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.223998] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9114cf99-b005-45f0-81b0-e9328b2d848a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.239093] env[62684]: DEBUG nova.compute.provider_tree [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2167.349934] env[62684]: DEBUG nova.objects.base [None req-b18586d5-1630-414a-b407-d0d4db62dc0f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Object Instance<0a8d7c48-cf90-4baf-a900-38fbd62869a6> lazy-loaded attributes: info_cache,migration_context {{(pid=62684) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2167.350908] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96e366b3-e593-4a04-b7b8-8e66d6bd45d5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.373048] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8268db13-2274-4e0c-8615-99e8477e9a29 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.364s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2167.374317] env[62684]: DEBUG oslo_concurrency.lockutils [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 
tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.537s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2167.374554] env[62684]: DEBUG oslo_concurrency.lockutils [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquiring lock "7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2167.374773] env[62684]: DEBUG oslo_concurrency.lockutils [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2167.374947] env[62684]: DEBUG oslo_concurrency.lockutils [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2167.376519] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9216b92-bfd6-4362-992c-271ae55d1ea9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.379339] env[62684]: INFO nova.compute.manager [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Terminating instance [ 2167.382000] env[62684]: DEBUG nova.compute.manager [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2167.382217] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2167.383456] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e785a45-c5c1-453a-beb3-4f921a304d12 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.388651] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2167.389041] env[62684]: DEBUG oslo_vmware.api [None req-b18586d5-1630-414a-b407-d0d4db62dc0f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2167.389041] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5255acd4-e101-c366-23a0-aa3dd5345ba3" [ 2167.389041] env[62684]: _type = "Task" [ 2167.389041] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2167.394789] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2167.395451] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb97394d-53ba-4ed0-8d35-19022c53ff0c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.403232] env[62684]: DEBUG oslo_vmware.api [None req-b18586d5-1630-414a-b407-d0d4db62dc0f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5255acd4-e101-c366-23a0-aa3dd5345ba3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.404716] env[62684]: DEBUG oslo_vmware.api [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2167.404716] env[62684]: value = "task-2053461" [ 2167.404716] env[62684]: _type = "Task" [ 2167.404716] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2167.413212] env[62684]: DEBUG nova.objects.instance [None req-c2ca817e-037d-4b4b-80db-8d8f9666431e tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lazy-loading 'flavor' on Instance uuid 2baabe7a-ed33-4cef-9acc-a7b804610b0a {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2167.414733] env[62684]: DEBUG oslo_vmware.api [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053461, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.464664] env[62684]: DEBUG oslo_vmware.api [None req-f4b6f3a3-8552-4afe-9208-6bf7f909a21d tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053457, 'name': PowerOffVM_Task, 'duration_secs': 0.292279} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2167.471130] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4b6f3a3-8552-4afe-9208-6bf7f909a21d tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2167.476603] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4b6f3a3-8552-4afe-9208-6bf7f909a21d tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Reconfiguring VM instance instance-0000005b to detach disk 2001 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2167.477040] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-edda3424-5675-4596-befb-b4779bd3897a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.498964] env[62684]: DEBUG oslo_vmware.api [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053459, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.503285] env[62684]: DEBUG oslo_vmware.api [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053458, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.504768] env[62684]: DEBUG oslo_vmware.api [None req-f4b6f3a3-8552-4afe-9208-6bf7f909a21d tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2167.504768] env[62684]: value = "task-2053462" [ 2167.504768] env[62684]: _type = "Task" [ 2167.504768] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2167.507883] env[62684]: DEBUG oslo_vmware.api [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053460, 'name': ReconfigVM_Task, 'duration_secs': 0.237275} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2167.510987] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Reconfigured VM instance instance-0000005a to detach disk 2001 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2167.515703] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6da78e81-848c-467f-90ae-07ffd25ad65e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.531529] env[62684]: DEBUG oslo_vmware.api [None req-f4b6f3a3-8552-4afe-9208-6bf7f909a21d tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053462, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.532886] env[62684]: DEBUG oslo_vmware.api [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2167.532886] env[62684]: value = "task-2053463" [ 2167.532886] env[62684]: _type = "Task" [ 2167.532886] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2167.541216] env[62684]: DEBUG oslo_vmware.api [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053463, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.662796] env[62684]: INFO nova.compute.manager [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Rescuing [ 2167.663149] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "refresh_cache-2baabe7a-ed33-4cef-9acc-a7b804610b0a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2167.663349] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquired lock "refresh_cache-2baabe7a-ed33-4cef-9acc-a7b804610b0a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2167.663560] env[62684]: DEBUG nova.network.neutron [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2167.742403] env[62684]: DEBUG nova.scheduler.client.report [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2167.900638] env[62684]: DEBUG oslo_vmware.api [None req-b18586d5-1630-414a-b407-d0d4db62dc0f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5255acd4-e101-c366-23a0-aa3dd5345ba3, 'name': SearchDatastore_Task, 'duration_secs': 0.015162} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2167.900924] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b18586d5-1630-414a-b407-d0d4db62dc0f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2167.912638] env[62684]: DEBUG oslo_vmware.api [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053461, 'name': PowerOffVM_Task, 'duration_secs': 0.222857} completed successfully. 
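The "Inventory has not changed for provider ..." record shows the inventory the resource tracker reports to Placement, where usable capacity is (total - reserved) * allocation_ratio. A quick check of those numbers with the values from the record:

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, usable)
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0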
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2167.912888] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2167.913921] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2167.913921] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-060ee19d-ffba-469d-8015-87398b1c649a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.918749] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c2ca817e-037d-4b4b-80db-8d8f9666431e tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "2baabe7a-ed33-4cef-9acc-a7b804610b0a" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.066s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2167.976695] env[62684]: DEBUG oslo_vmware.api [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053459, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.584486} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2167.979285] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 584845d2-d146-42bf-8ef5-58532fe24f65/584845d2-d146-42bf-8ef5-58532fe24f65.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2167.979567] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2167.979911] env[62684]: DEBUG oslo_vmware.api [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053458, 'name': PowerOnVM_Task, 'duration_secs': 0.837978} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2167.980143] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-88223cf4-4782-4294-912e-bec2db7ca949 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.981969] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2167.982206] env[62684]: INFO nova.compute.manager [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Took 10.59 seconds to spawn the instance on the hypervisor. [ 2167.982410] env[62684]: DEBUG nova.compute.manager [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2167.983235] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8070555-4e13-4662-92c7-6c99e2bd6cad {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.994031] env[62684]: DEBUG oslo_vmware.api [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 2167.994031] env[62684]: value = "task-2053465" [ 2167.994031] env[62684]: _type = "Task" [ 2167.994031] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2168.001317] env[62684]: DEBUG oslo_vmware.api [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053465, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2168.015941] env[62684]: DEBUG oslo_vmware.api [None req-f4b6f3a3-8552-4afe-9208-6bf7f909a21d tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053462, 'name': ReconfigVM_Task, 'duration_secs': 0.280181} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2168.016224] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4b6f3a3-8552-4afe-9208-6bf7f909a21d tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Reconfigured VM instance instance-0000005b to detach disk 2001 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2168.016431] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4b6f3a3-8552-4afe-9208-6bf7f909a21d tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2168.016719] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8aaee0dc-7185-46f7-ac79-4a7915d524d2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.022585] env[62684]: DEBUG oslo_vmware.api [None req-f4b6f3a3-8552-4afe-9208-6bf7f909a21d tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2168.022585] env[62684]: value = "task-2053466" [ 2168.022585] env[62684]: _type = "Task" [ 2168.022585] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2168.030033] env[62684]: DEBUG oslo_vmware.api [None req-f4b6f3a3-8552-4afe-9208-6bf7f909a21d tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053466, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2168.041205] env[62684]: DEBUG oslo_vmware.api [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053463, 'name': ReconfigVM_Task, 'duration_secs': 0.143576} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2168.041518] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421367', 'volume_id': 'e8dc2ee3-f51f-4210-9390-c40af41c56da', 'name': 'volume-e8dc2ee3-f51f-4210-9390-c40af41c56da', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'aebbc2cc-8973-4907-9ec8-085027fd7ca3', 'attached_at': '', 'detached_at': '', 'volume_id': 'e8dc2ee3-f51f-4210-9390-c40af41c56da', 'serial': 'e8dc2ee3-f51f-4210-9390-c40af41c56da'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2168.041818] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2168.042625] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe2f31c-40cd-4211-b7f7-e69c48bb5d41 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.049185] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2168.049814] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ab1bb258-74ce-42b3-b482-28e193343b67 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.248152] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.811s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2168.248803] env[62684]: DEBUG nova.compute.manager [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2168.253964] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.865s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2168.254249] env[62684]: DEBUG nova.objects.instance [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lazy-loading 'pci_requests' on Instance uuid daf1486b-d5c2-4341-8a27-36eeeb08cd26 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2168.484878] env[62684]: DEBUG nova.network.neutron [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Updating instance_info_cache with network_info: [{"id": "3fcb3920-5e10-45e2-865d-cc9b89a1e335", "address": "fa:16:3e:2f:71:d6", "network": {"id": "bd253713-4e81-4c94-9689-22b81e7f51b6", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-307001665-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd812751722143fabedfa986a2d98b59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3fcb3920-5e", "ovs_interfaceid": "3fcb3920-5e10-45e2-865d-cc9b89a1e335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2168.504656] env[62684]: INFO nova.compute.manager [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Took 16.83 seconds to build instance. [ 2168.511236] env[62684]: DEBUG oslo_vmware.api [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053465, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086362} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2168.511498] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2168.512279] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc02193d-148a-40b6-b91c-382d5399f669 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.534434] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] 584845d2-d146-42bf-8ef5-58532fe24f65/584845d2-d146-42bf-8ef5-58532fe24f65.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2168.537497] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63fa397c-4d23-40de-87ce-d278404cfda3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.557222] env[62684]: DEBUG oslo_vmware.api [None req-f4b6f3a3-8552-4afe-9208-6bf7f909a21d tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053466, 'name': PowerOnVM_Task, 'duration_secs': 0.448898} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2168.558268] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4b6f3a3-8552-4afe-9208-6bf7f909a21d tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2168.558518] env[62684]: DEBUG nova.compute.manager [None req-f4b6f3a3-8552-4afe-9208-6bf7f909a21d tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2168.558850] env[62684]: DEBUG oslo_vmware.api [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 2168.558850] env[62684]: value = "task-2053468" [ 2168.558850] env[62684]: _type = "Task" [ 2168.558850] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2168.559508] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d83c04-706d-451a-a32b-6f0d59cff3c4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.572701] env[62684]: DEBUG oslo_vmware.api [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053468, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2168.757442] env[62684]: DEBUG nova.compute.utils [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2168.760676] env[62684]: DEBUG nova.objects.instance [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lazy-loading 'numa_topology' on Instance uuid daf1486b-d5c2-4341-8a27-36eeeb08cd26 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2168.761827] env[62684]: DEBUG nova.compute.manager [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2168.761975] env[62684]: DEBUG nova.network.neutron [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2168.818201] env[62684]: DEBUG nova.policy [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6e8b54745b53458eafe4d911d7d6d7d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c54f74085f343d2b790145b0d82a9f8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2168.988150] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Releasing lock "refresh_cache-2baabe7a-ed33-4cef-9acc-a7b804610b0a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2169.008773] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04e8ae9f-1a2a-4f11-89de-c68b940832e7 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "d7f09d0e-f7b6-415e-8d82-47eba1153aa1" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.347s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2169.072367] env[62684]: DEBUG oslo_vmware.api [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053468, 'name': ReconfigVM_Task, 'duration_secs': 0.284359} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2169.073078] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Reconfigured VM instance instance-0000005e to attach disk [datastore2] 584845d2-d146-42bf-8ef5-58532fe24f65/584845d2-d146-42bf-8ef5-58532fe24f65.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2169.075937] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-09a80f8e-9762-4f95-b815-a13bedd3cb6f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.089104] env[62684]: DEBUG oslo_vmware.api [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 2169.089104] env[62684]: value = "task-2053469" [ 2169.089104] env[62684]: _type = "Task" [ 2169.089104] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2169.098868] env[62684]: DEBUG oslo_vmware.api [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053469, 'name': Rename_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2169.265496] env[62684]: DEBUG nova.compute.manager [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2169.266406] env[62684]: INFO nova.compute.claims [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2169.286898] env[62684]: DEBUG nova.network.neutron [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Successfully created port: 89a358de-54fa-41bb-ae43-85a7abbb900b {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2169.523601] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2169.524146] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-17fda56f-2c59-4b61-b39b-d3e60d7bd5af {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.535056] env[62684]: DEBUG oslo_vmware.api [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 2169.535056] env[62684]: value = "task-2053470" [ 2169.535056] env[62684]: _type = "Task" [ 2169.535056] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2169.550408] env[62684]: DEBUG oslo_vmware.api [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053470, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2169.598681] env[62684]: DEBUG oslo_vmware.api [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053469, 'name': Rename_Task, 'duration_secs': 0.15994} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2169.599873] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2169.600256] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-217efe02-2227-4376-8007-fec7fa2a41fd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.608409] env[62684]: DEBUG oslo_vmware.api [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 2169.608409] env[62684]: value = "task-2053471" [ 2169.608409] env[62684]: _type = "Task" [ 2169.608409] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2169.615600] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquiring lock "7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2169.615985] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Lock "7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2169.616571] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquiring lock "7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2169.616775] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Lock "7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2169.617062] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Lock "7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2169.621276] env[62684]: INFO nova.compute.manager [None 
req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Terminating instance [ 2169.627348] env[62684]: DEBUG oslo_vmware.api [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053471, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2169.628077] env[62684]: DEBUG nova.compute.manager [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2169.628326] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2169.629194] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6f45ed3-15d0-4eae-8af1-13e0bfe98aa2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.637215] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2169.637692] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-29cf3818-4d17-445a-8073-362e902c965b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.644281] env[62684]: DEBUG oslo_vmware.api [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2169.644281] env[62684]: value = "task-2053472" [ 2169.644281] env[62684]: _type = "Task" [ 2169.644281] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2169.656910] env[62684]: DEBUG oslo_vmware.api [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053472, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2170.046381] env[62684]: DEBUG oslo_vmware.api [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053470, 'name': PowerOffVM_Task, 'duration_secs': 0.30641} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2170.047364] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2170.048214] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afdf73f8-3d54-4e33-b9cf-a8417da2757a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.080311] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b50c32ec-7f59-491a-b2dc-af33884e4ba6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.095556] env[62684]: DEBUG nova.compute.manager [req-592906b3-b96a-4a4a-8379-a7b1f6657c0f req-376a9b0f-9ad3-4e0d-9f9c-726d52791044 service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Received event network-changed-b5747949-00d7-4815-9080-52285a6a8813 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2170.095733] env[62684]: DEBUG nova.compute.manager [req-592906b3-b96a-4a4a-8379-a7b1f6657c0f req-376a9b0f-9ad3-4e0d-9f9c-726d52791044 service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Refreshing instance network info cache due to event network-changed-b5747949-00d7-4815-9080-52285a6a8813. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2170.095963] env[62684]: DEBUG oslo_concurrency.lockutils [req-592906b3-b96a-4a4a-8379-a7b1f6657c0f req-376a9b0f-9ad3-4e0d-9f9c-726d52791044 service nova] Acquiring lock "refresh_cache-f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2170.096133] env[62684]: DEBUG oslo_concurrency.lockutils [req-592906b3-b96a-4a4a-8379-a7b1f6657c0f req-376a9b0f-9ad3-4e0d-9f9c-726d52791044 service nova] Acquired lock "refresh_cache-f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2170.096301] env[62684]: DEBUG nova.network.neutron [req-592906b3-b96a-4a4a-8379-a7b1f6657c0f req-376a9b0f-9ad3-4e0d-9f9c-726d52791044 service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Refreshing network info cache for port b5747949-00d7-4815-9080-52285a6a8813 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2170.117185] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2170.117185] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b021bae5-a9d6-490c-b7f3-7a2be1777bb7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.123679] env[62684]: DEBUG oslo_vmware.api [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 
tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053471, 'name': PowerOnVM_Task, 'duration_secs': 0.440243} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2170.125247] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2170.126513] env[62684]: INFO nova.compute.manager [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Took 10.37 seconds to spawn the instance on the hypervisor. [ 2170.126763] env[62684]: DEBUG nova.compute.manager [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2170.127920] env[62684]: DEBUG oslo_vmware.api [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 2170.127920] env[62684]: value = "task-2053473" [ 2170.127920] env[62684]: _type = "Task" [ 2170.127920] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2170.129141] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-331ebd1b-0b39-49bb-a55b-7b062fb7fd67 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.153955] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] VM already powered off {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2170.154291] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2170.154602] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2170.154802] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2170.155041] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2170.155371] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7bef0c0d-621c-4bb8-8dcd-fe31bc588733 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.162793] env[62684]: DEBUG oslo_vmware.api [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053472, 'name': PowerOffVM_Task, 'duration_secs': 0.225055} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2170.163141] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2170.163462] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2170.164727] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8a560e25-1773-45a6-aa7b-bb3c5ac8bdf0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.166461] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2170.166747] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2170.167563] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecb1b082-e04e-4c78-a1b8-4a753c6905da {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.173381] env[62684]: DEBUG oslo_vmware.api [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 2170.173381] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52226387-4d9f-7694-cd37-cf2790bad1ef" [ 2170.173381] env[62684]: _type = "Task" [ 2170.173381] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2170.182181] env[62684]: DEBUG oslo_vmware.api [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52226387-4d9f-7694-cd37-cf2790bad1ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2170.278053] env[62684]: DEBUG nova.compute.manager [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2170.313766] env[62684]: DEBUG nova.virt.hardware [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2170.314041] env[62684]: DEBUG nova.virt.hardware [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2170.314274] env[62684]: DEBUG nova.virt.hardware [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2170.314407] env[62684]: DEBUG nova.virt.hardware [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 
tempest-ServersTestJSON-828328252-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2170.314555] env[62684]: DEBUG nova.virt.hardware [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2170.314707] env[62684]: DEBUG nova.virt.hardware [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2170.314922] env[62684]: DEBUG nova.virt.hardware [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2170.315099] env[62684]: DEBUG nova.virt.hardware [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2170.315276] env[62684]: DEBUG nova.virt.hardware [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2170.315448] env[62684]: DEBUG nova.virt.hardware [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2170.315638] env[62684]: DEBUG nova.virt.hardware [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2170.317209] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4948b7cc-9249-4829-bf96-f1ae8f3dc1fb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.328761] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6295690-5ba4-4508-aafb-db5264f77954 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.614815] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60bc4d21-9328-49ec-83af-e7d3b4ebe29e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.623849] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e9df33-e741-4297-b484-629e0a3159a2 {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.664995] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-568a09d7-27dc-47d9-907b-fbf9ce977258 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.671846] env[62684]: INFO nova.compute.manager [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Took 15.87 seconds to build instance. [ 2170.681597] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-892421b5-1057-4c87-881c-0620a17f0ae9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.690697] env[62684]: DEBUG oslo_vmware.api [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52226387-4d9f-7694-cd37-cf2790bad1ef, 'name': SearchDatastore_Task, 'duration_secs': 0.008927} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2170.691809] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20bb0437-8b1b-40b4-9b82-b7b1180e118d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.703404] env[62684]: DEBUG nova.compute.provider_tree [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2170.708388] env[62684]: DEBUG oslo_vmware.api [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 2170.708388] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520e49dc-1535-1fd8-f65a-8ea1bdcb476b" [ 2170.708388] env[62684]: _type = "Task" [ 2170.708388] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2170.719743] env[62684]: DEBUG oslo_vmware.api [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520e49dc-1535-1fd8-f65a-8ea1bdcb476b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2170.849283] env[62684]: DEBUG nova.network.neutron [req-592906b3-b96a-4a4a-8379-a7b1f6657c0f req-376a9b0f-9ad3-4e0d-9f9c-726d52791044 service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Updated VIF entry in instance network info cache for port b5747949-00d7-4815-9080-52285a6a8813. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2170.849283] env[62684]: DEBUG nova.network.neutron [req-592906b3-b96a-4a4a-8379-a7b1f6657c0f req-376a9b0f-9ad3-4e0d-9f9c-726d52791044 service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Updating instance_info_cache with network_info: [{"id": "b5747949-00d7-4815-9080-52285a6a8813", "address": "fa:16:3e:fd:34:0c", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5747949-00", "ovs_interfaceid": "b5747949-00d7-4815-9080-52285a6a8813", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2171.119406] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Acquiring lock "6b461482-0606-4af3-98a2-88c0318d1a69" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2171.119756] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Lock "6b461482-0606-4af3-98a2-88c0318d1a69" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2171.119987] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Acquiring lock "6b461482-0606-4af3-98a2-88c0318d1a69-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2171.120259] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Lock "6b461482-0606-4af3-98a2-88c0318d1a69-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2171.120473] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 
tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Lock "6b461482-0606-4af3-98a2-88c0318d1a69-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2171.122645] env[62684]: INFO nova.compute.manager [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Terminating instance [ 2171.124311] env[62684]: DEBUG nova.compute.manager [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2171.124511] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2171.125362] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f4bd37d-465a-493d-bbad-d3b6f00a8223 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.134081] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2171.134356] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2e0b7653-1a63-4f47-8e31-2bdf54413651 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.141204] env[62684]: DEBUG oslo_vmware.api [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Waiting for the task: (returnval){ [ 2171.141204] env[62684]: value = "task-2053475" [ 2171.141204] env[62684]: _type = "Task" [ 2171.141204] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2171.151021] env[62684]: DEBUG oslo_vmware.api [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Task: {'id': task-2053475, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2171.174076] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd6acd43-037a-4d4f-b6d5-f0486cb90413 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "584845d2-d146-42bf-8ef5-58532fe24f65" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.386s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2171.208642] env[62684]: DEBUG nova.scheduler.client.report [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2171.226836] env[62684]: DEBUG oslo_vmware.api [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520e49dc-1535-1fd8-f65a-8ea1bdcb476b, 'name': SearchDatastore_Task, 'duration_secs': 0.010196} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2171.226836] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2171.226836] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 2baabe7a-ed33-4cef-9acc-a7b804610b0a/3931321c-cb4c-4b87-8d3a-50e05ea01db2-rescue.vmdk. {{(pid=62684) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 2171.227060] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e19b786a-dba0-492b-ae26-1f2c58362948 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.234601] env[62684]: DEBUG oslo_vmware.api [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 2171.234601] env[62684]: value = "task-2053476" [ 2171.234601] env[62684]: _type = "Task" [ 2171.234601] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2171.246016] env[62684]: DEBUG oslo_vmware.api [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053476, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2171.351430] env[62684]: DEBUG oslo_concurrency.lockutils [req-592906b3-b96a-4a4a-8379-a7b1f6657c0f req-376a9b0f-9ad3-4e0d-9f9c-726d52791044 service nova] Releasing lock "refresh_cache-f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2171.651372] env[62684]: DEBUG oslo_vmware.api [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Task: {'id': task-2053475, 'name': PowerOffVM_Task, 'duration_secs': 0.19669} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2171.651669] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2171.651838] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2171.652126] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a6078b2f-52fa-4122-aa0b-8d60697f33ac {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.714856] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.461s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2171.717496] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b18586d5-1630-414a-b407-d0d4db62dc0f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 3.816s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2171.747165] env[62684]: DEBUG oslo_vmware.api [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053476, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2171.758023] env[62684]: INFO nova.network.neutron [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Updating port da310d7c-cd12-49ca-8014-efa9469aef45 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2172.133387] env[62684]: DEBUG nova.compute.manager [req-fccb0c92-3b83-4e19-8dcc-9f9d04ec5d11 req-fc8044df-d121-4a18-bc60-121db4b7d163 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Received event network-changed-efda8f4d-97b4-44f8-b30b-d26145e98e58 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2172.133588] env[62684]: DEBUG nova.compute.manager [req-fccb0c92-3b83-4e19-8dcc-9f9d04ec5d11 req-fc8044df-d121-4a18-bc60-121db4b7d163 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Refreshing instance network info cache due to event network-changed-efda8f4d-97b4-44f8-b30b-d26145e98e58. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2172.133816] env[62684]: DEBUG oslo_concurrency.lockutils [req-fccb0c92-3b83-4e19-8dcc-9f9d04ec5d11 req-fc8044df-d121-4a18-bc60-121db4b7d163 service nova] Acquiring lock "refresh_cache-d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2172.133964] env[62684]: DEBUG oslo_concurrency.lockutils [req-fccb0c92-3b83-4e19-8dcc-9f9d04ec5d11 req-fc8044df-d121-4a18-bc60-121db4b7d163 service nova] Acquired lock "refresh_cache-d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2172.134629] env[62684]: DEBUG nova.network.neutron [req-fccb0c92-3b83-4e19-8dcc-9f9d04ec5d11 req-fc8044df-d121-4a18-bc60-121db4b7d163 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Refreshing network info cache for port efda8f4d-97b4-44f8-b30b-d26145e98e58 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2172.213089] env[62684]: DEBUG nova.compute.manager [req-21a6b35a-0cd7-48ed-9601-f09176851f51 req-f847ed03-6a2e-4371-ba48-0c63994f06b3 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Received event network-changed-efda8f4d-97b4-44f8-b30b-d26145e98e58 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2172.213804] env[62684]: DEBUG nova.compute.manager [req-21a6b35a-0cd7-48ed-9601-f09176851f51 req-f847ed03-6a2e-4371-ba48-0c63994f06b3 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Refreshing instance network info cache due to event network-changed-efda8f4d-97b4-44f8-b30b-d26145e98e58. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2172.213804] env[62684]: DEBUG oslo_concurrency.lockutils [req-21a6b35a-0cd7-48ed-9601-f09176851f51 req-f847ed03-6a2e-4371-ba48-0c63994f06b3 service nova] Acquiring lock "refresh_cache-d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2172.246017] env[62684]: DEBUG oslo_vmware.api [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053476, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.670506} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2172.246510] env[62684]: INFO nova.virt.vmwareapi.ds_util [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 2baabe7a-ed33-4cef-9acc-a7b804610b0a/3931321c-cb4c-4b87-8d3a-50e05ea01db2-rescue.vmdk. [ 2172.247372] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db1809c4-847c-4a72-b6ce-6340dcdd1e0b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.285919] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] 2baabe7a-ed33-4cef-9acc-a7b804610b0a/3931321c-cb4c-4b87-8d3a-50e05ea01db2-rescue.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2172.289417] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0ee0027-3033-4fd7-838c-0401d0831240 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.309285] env[62684]: DEBUG oslo_vmware.api [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 2172.309285] env[62684]: value = "task-2053478" [ 2172.309285] env[62684]: _type = "Task" [ 2172.309285] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2172.319621] env[62684]: DEBUG oslo_vmware.api [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053478, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2172.531261] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-811421b0-6b09-4ffb-a3c8-7bc9262413e1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.539777] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc9146a1-ce8d-42b1-8c78-46d0da92d841 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.570937] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-618334a9-35b3-437b-8e48-b7ab72bb4e1f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.578875] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d3a3a4c-4ae8-4cdf-8c5c-c8288042276d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.593909] env[62684]: DEBUG nova.compute.provider_tree [None req-b18586d5-1630-414a-b407-d0d4db62dc0f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2172.762748] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2172.763172] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2172.763484] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Deleting the datastore file [datastore1] aebbc2cc-8973-4907-9ec8-085027fd7ca3 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2172.763877] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e79bfc79-089f-46be-afa4-909c12c92b2f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.772727] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2172.773557] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 
7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2172.773557] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Deleting the datastore file [datastore2] 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2172.774715] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d8d6a626-fb3b-49bc-a60c-2dea0707ccd0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.776799] env[62684]: DEBUG oslo_vmware.api [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2172.776799] env[62684]: value = "task-2053479" [ 2172.776799] env[62684]: _type = "Task" [ 2172.776799] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2172.783954] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2172.784234] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2172.784443] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Deleting the datastore file [datastore2] 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2172.784761] env[62684]: DEBUG oslo_vmware.api [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2172.784761] env[62684]: value = "task-2053480" [ 2172.784761] env[62684]: _type = "Task" [ 2172.784761] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2172.785323] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e51a340a-fda1-4faa-9d82-9cf1a63e68fc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.790687] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2172.791014] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2172.791078] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Deleting the datastore file [datastore1] 6b461482-0606-4af3-98a2-88c0318d1a69 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2172.796751] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-947de7c2-9282-4894-a341-2100013ee390 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.798417] env[62684]: DEBUG oslo_vmware.api [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053479, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2172.800666] env[62684]: DEBUG oslo_vmware.api [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for the task: (returnval){ [ 2172.800666] env[62684]: value = "task-2053481" [ 2172.800666] env[62684]: _type = "Task" [ 2172.800666] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2172.803790] env[62684]: DEBUG oslo_vmware.api [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053480, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2172.808132] env[62684]: DEBUG oslo_vmware.api [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Waiting for the task: (returnval){ [ 2172.808132] env[62684]: value = "task-2053482" [ 2172.808132] env[62684]: _type = "Task" [ 2172.808132] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2172.815197] env[62684]: DEBUG oslo_vmware.api [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053481, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2172.822280] env[62684]: DEBUG oslo_vmware.api [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Task: {'id': task-2053482, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2172.825637] env[62684]: DEBUG oslo_vmware.api [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053478, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2172.876164] env[62684]: DEBUG nova.network.neutron [req-fccb0c92-3b83-4e19-8dcc-9f9d04ec5d11 req-fc8044df-d121-4a18-bc60-121db4b7d163 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Updated VIF entry in instance network info cache for port efda8f4d-97b4-44f8-b30b-d26145e98e58. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2172.876600] env[62684]: DEBUG nova.network.neutron [req-fccb0c92-3b83-4e19-8dcc-9f9d04ec5d11 req-fc8044df-d121-4a18-bc60-121db4b7d163 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Updating instance_info_cache with network_info: [{"id": "efda8f4d-97b4-44f8-b30b-d26145e98e58", "address": "fa:16:3e:be:97:33", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefda8f4d-97", "ovs_interfaceid": "efda8f4d-97b4-44f8-b30b-d26145e98e58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2173.097441] env[62684]: DEBUG nova.scheduler.client.report [None req-b18586d5-1630-414a-b407-d0d4db62dc0f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2173.287728] env[62684]: DEBUG oslo_vmware.api [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053479, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.183679} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2173.291670] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2173.292069] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2173.292397] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2173.295064] env[62684]: INFO nova.compute.manager [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Took 7.68 seconds to destroy the instance on the hypervisor. [ 2173.295064] env[62684]: DEBUG oslo.service.loopingcall [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2173.295064] env[62684]: DEBUG nova.compute.manager [-] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2173.295064] env[62684]: DEBUG nova.network.neutron [-] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2173.299517] env[62684]: DEBUG oslo_vmware.api [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053480, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.193138} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2173.302502] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2173.302502] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2173.302502] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2173.302502] env[62684]: INFO nova.compute.manager [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Took 3.67 seconds to destroy the instance on the hypervisor. [ 2173.302502] env[62684]: DEBUG oslo.service.loopingcall [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2173.302502] env[62684]: DEBUG nova.compute.manager [-] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2173.302502] env[62684]: DEBUG nova.network.neutron [-] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2173.311381] env[62684]: DEBUG oslo_vmware.api [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Task: {'id': task-2053481, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177199} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2173.314190] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2173.314532] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2173.314883] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2173.315231] env[62684]: INFO nova.compute.manager [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Took 5.93 seconds to destroy the instance on the hypervisor. [ 2173.318063] env[62684]: DEBUG oslo.service.loopingcall [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2173.318063] env[62684]: DEBUG nova.compute.manager [-] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2173.318063] env[62684]: DEBUG nova.network.neutron [-] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2173.324097] env[62684]: DEBUG oslo_vmware.api [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Task: {'id': task-2053482, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.178258} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2173.324815] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2173.325242] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2173.325581] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2173.325893] env[62684]: INFO nova.compute.manager [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Took 2.20 seconds to destroy the instance on the hypervisor. [ 2173.329068] env[62684]: DEBUG oslo.service.loopingcall [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2173.329355] env[62684]: DEBUG nova.compute.manager [-] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2173.329588] env[62684]: DEBUG nova.network.neutron [-] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2173.331536] env[62684]: DEBUG oslo_vmware.api [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053478, 'name': ReconfigVM_Task, 'duration_secs': 0.69083} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2173.332269] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Reconfigured VM instance instance-0000003f to attach disk [datastore1] 2baabe7a-ed33-4cef-9acc-a7b804610b0a/3931321c-cb4c-4b87-8d3a-50e05ea01db2-rescue.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2173.333708] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b000b95b-a9ac-4315-bae6-5a3f928a7c3d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.367609] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c096ef5e-77d0-414b-a148-0082137bd681 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.379855] env[62684]: DEBUG oslo_concurrency.lockutils [req-fccb0c92-3b83-4e19-8dcc-9f9d04ec5d11 req-fc8044df-d121-4a18-bc60-121db4b7d163 service nova] Releasing lock "refresh_cache-d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2173.381024] env[62684]: DEBUG oslo_concurrency.lockutils [req-21a6b35a-0cd7-48ed-9601-f09176851f51 req-f847ed03-6a2e-4371-ba48-0c63994f06b3 service nova] Acquired lock "refresh_cache-d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2173.381385] env[62684]: DEBUG nova.network.neutron [req-21a6b35a-0cd7-48ed-9601-f09176851f51 req-f847ed03-6a2e-4371-ba48-0c63994f06b3 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Refreshing network info cache for port efda8f4d-97b4-44f8-b30b-d26145e98e58 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2173.391023] env[62684]: DEBUG oslo_vmware.api [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 2173.391023] env[62684]: value = "task-2053483" [ 2173.391023] env[62684]: _type = "Task" [ 2173.391023] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2173.399729] env[62684]: DEBUG oslo_vmware.api [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053483, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2173.888938] env[62684]: DEBUG nova.compute.manager [req-9440d88e-e8e6-4305-aa65-0516ec224667 req-cc216aea-79b4-4c4d-9162-730c82b3aedf service nova] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Received event network-vif-plugged-89a358de-54fa-41bb-ae43-85a7abbb900b {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2173.890050] env[62684]: DEBUG oslo_concurrency.lockutils [req-9440d88e-e8e6-4305-aa65-0516ec224667 req-cc216aea-79b4-4c4d-9162-730c82b3aedf service nova] Acquiring lock "f4fab142-8066-43c1-abaa-a9f66775114c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2173.890398] env[62684]: DEBUG oslo_concurrency.lockutils [req-9440d88e-e8e6-4305-aa65-0516ec224667 req-cc216aea-79b4-4c4d-9162-730c82b3aedf service nova] Lock "f4fab142-8066-43c1-abaa-a9f66775114c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2173.890888] env[62684]: DEBUG oslo_concurrency.lockutils [req-9440d88e-e8e6-4305-aa65-0516ec224667 req-cc216aea-79b4-4c4d-9162-730c82b3aedf service nova] Lock "f4fab142-8066-43c1-abaa-a9f66775114c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2173.891222] env[62684]: DEBUG nova.compute.manager [req-9440d88e-e8e6-4305-aa65-0516ec224667 req-cc216aea-79b4-4c4d-9162-730c82b3aedf service nova] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] No waiting events found dispatching network-vif-plugged-89a358de-54fa-41bb-ae43-85a7abbb900b {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2173.891594] env[62684]: WARNING nova.compute.manager [req-9440d88e-e8e6-4305-aa65-0516ec224667 req-cc216aea-79b4-4c4d-9162-730c82b3aedf service nova] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Received unexpected event network-vif-plugged-89a358de-54fa-41bb-ae43-85a7abbb900b for instance with vm_state building and task_state spawning. [ 2173.903748] env[62684]: DEBUG oslo_vmware.api [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053483, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2174.109295] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b18586d5-1630-414a-b407-d0d4db62dc0f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.392s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2174.214016] env[62684]: DEBUG nova.network.neutron [-] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2174.226338] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "refresh_cache-daf1486b-d5c2-4341-8a27-36eeeb08cd26" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2174.226338] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquired lock "refresh_cache-daf1486b-d5c2-4341-8a27-36eeeb08cd26" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2174.226338] env[62684]: DEBUG nova.network.neutron [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2174.227714] env[62684]: DEBUG nova.network.neutron [-] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2174.251509] env[62684]: DEBUG nova.compute.manager [req-84cfbafa-1ff4-47fe-90e1-4259cd4dbf89 req-ee7422c4-7a39-4bac-b6f7-3fc6a430c6df service nova] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Received event network-changed-4424c4bf-2ffd-4b4a-89f5-b6e9c9faa84b {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2174.251710] env[62684]: DEBUG nova.compute.manager [req-84cfbafa-1ff4-47fe-90e1-4259cd4dbf89 req-ee7422c4-7a39-4bac-b6f7-3fc6a430c6df service nova] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Refreshing instance network info cache due to event network-changed-4424c4bf-2ffd-4b4a-89f5-b6e9c9faa84b. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2174.252037] env[62684]: DEBUG oslo_concurrency.lockutils [req-84cfbafa-1ff4-47fe-90e1-4259cd4dbf89 req-ee7422c4-7a39-4bac-b6f7-3fc6a430c6df service nova] Acquiring lock "refresh_cache-584845d2-d146-42bf-8ef5-58532fe24f65" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2174.253124] env[62684]: DEBUG oslo_concurrency.lockutils [req-84cfbafa-1ff4-47fe-90e1-4259cd4dbf89 req-ee7422c4-7a39-4bac-b6f7-3fc6a430c6df service nova] Acquired lock "refresh_cache-584845d2-d146-42bf-8ef5-58532fe24f65" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2174.253124] env[62684]: DEBUG nova.network.neutron [req-84cfbafa-1ff4-47fe-90e1-4259cd4dbf89 req-ee7422c4-7a39-4bac-b6f7-3fc6a430c6df service nova] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Refreshing network info cache for port 4424c4bf-2ffd-4b4a-89f5-b6e9c9faa84b {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2174.262057] env[62684]: DEBUG nova.compute.manager [req-be6f2bec-551e-4259-a293-30830cb660ab req-b5e3ff45-8b8a-45f8-bd31-2d011c233eaf service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Received event network-changed-b5747949-00d7-4815-9080-52285a6a8813 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2174.262297] env[62684]: DEBUG nova.compute.manager [req-be6f2bec-551e-4259-a293-30830cb660ab req-b5e3ff45-8b8a-45f8-bd31-2d011c233eaf service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Refreshing instance network info cache due to event network-changed-b5747949-00d7-4815-9080-52285a6a8813. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2174.262549] env[62684]: DEBUG oslo_concurrency.lockutils [req-be6f2bec-551e-4259-a293-30830cb660ab req-b5e3ff45-8b8a-45f8-bd31-2d011c233eaf service nova] Acquiring lock "refresh_cache-f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2174.262882] env[62684]: DEBUG oslo_concurrency.lockutils [req-be6f2bec-551e-4259-a293-30830cb660ab req-b5e3ff45-8b8a-45f8-bd31-2d011c233eaf service nova] Acquired lock "refresh_cache-f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2174.263106] env[62684]: DEBUG nova.network.neutron [req-be6f2bec-551e-4259-a293-30830cb660ab req-b5e3ff45-8b8a-45f8-bd31-2d011c233eaf service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Refreshing network info cache for port b5747949-00d7-4815-9080-52285a6a8813 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2174.408095] env[62684]: DEBUG oslo_vmware.api [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053483, 'name': ReconfigVM_Task, 'duration_secs': 0.80146} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2174.408994] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2174.409374] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-45c0bf0a-edb0-403e-86b7-46fe3362632d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.416472] env[62684]: DEBUG oslo_vmware.api [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 2174.416472] env[62684]: value = "task-2053484" [ 2174.416472] env[62684]: _type = "Task" [ 2174.416472] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2174.428084] env[62684]: DEBUG oslo_vmware.api [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053484, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2174.444608] env[62684]: DEBUG nova.network.neutron [req-21a6b35a-0cd7-48ed-9601-f09176851f51 req-f847ed03-6a2e-4371-ba48-0c63994f06b3 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Updated VIF entry in instance network info cache for port efda8f4d-97b4-44f8-b30b-d26145e98e58. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2174.444952] env[62684]: DEBUG nova.network.neutron [req-21a6b35a-0cd7-48ed-9601-f09176851f51 req-f847ed03-6a2e-4371-ba48-0c63994f06b3 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Updating instance_info_cache with network_info: [{"id": "efda8f4d-97b4-44f8-b30b-d26145e98e58", "address": "fa:16:3e:be:97:33", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefda8f4d-97", "ovs_interfaceid": "efda8f4d-97b4-44f8-b30b-d26145e98e58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2174.449222] env[62684]: DEBUG nova.network.neutron [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Successfully updated port: 89a358de-54fa-41bb-ae43-85a7abbb900b {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2174.483080] env[62684]: DEBUG nova.network.neutron [-] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2174.562400] env[62684]: DEBUG nova.network.neutron [req-84cfbafa-1ff4-47fe-90e1-4259cd4dbf89 req-ee7422c4-7a39-4bac-b6f7-3fc6a430c6df service nova] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Updated VIF entry in instance network info cache for port 4424c4bf-2ffd-4b4a-89f5-b6e9c9faa84b. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2174.563589] env[62684]: DEBUG nova.network.neutron [req-84cfbafa-1ff4-47fe-90e1-4259cd4dbf89 req-ee7422c4-7a39-4bac-b6f7-3fc6a430c6df service nova] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Updating instance_info_cache with network_info: [{"id": "4424c4bf-2ffd-4b4a-89f5-b6e9c9faa84b", "address": "fa:16:3e:e9:be:8c", "network": {"id": "bf53c8de-5f43-4a15-9911-25340615a63b", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1946277195-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "540d70f4b6274c38a5e79c00e389d8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6db039c-542c-4544-a57d-ddcc6c1e8e45", "external-id": "nsx-vlan-transportzone-810", "segmentation_id": 810, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4424c4bf-2f", "ovs_interfaceid": "4424c4bf-2ffd-4b4a-89f5-b6e9c9faa84b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2174.647478] env[62684]: DEBUG nova.network.neutron [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Successfully updated port: fe223d4f-0585-454f-b724-0cdff1d2ceea {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2174.698264] env[62684]: INFO nova.scheduler.client.report [None req-b18586d5-1630-414a-b407-d0d4db62dc0f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Deleted allocation for migration 951ca425-53d8-45a1-867b-812651c44c78 [ 2174.717038] env[62684]: INFO nova.compute.manager [-] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Took 1.42 seconds to deallocate network for instance. [ 2174.734995] env[62684]: INFO nova.compute.manager [-] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Took 1.44 seconds to deallocate network for instance. [ 2174.930355] env[62684]: DEBUG oslo_vmware.api [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053484, 'name': PowerOnVM_Task, 'duration_secs': 0.402871} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2174.930355] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2174.932608] env[62684]: DEBUG nova.compute.manager [None req-d33b8630-f1b9-40df-82fb-3195ddf64371 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2174.934459] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd0560ac-0229-4974-869b-e360006e32ba {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.946133] env[62684]: DEBUG nova.network.neutron [-] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2174.948304] env[62684]: DEBUG oslo_concurrency.lockutils [req-21a6b35a-0cd7-48ed-9601-f09176851f51 req-f847ed03-6a2e-4371-ba48-0c63994f06b3 service nova] Releasing lock "refresh_cache-d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2174.953331] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "refresh_cache-f4fab142-8066-43c1-abaa-a9f66775114c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2174.953619] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquired lock "refresh_cache-f4fab142-8066-43c1-abaa-a9f66775114c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2174.953873] env[62684]: DEBUG nova.network.neutron [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2174.987645] env[62684]: INFO nova.compute.manager [-] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Took 1.67 seconds to deallocate network for instance. 
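The power-off, unregister, and datastore-file-deletion steps traced above (and the ReconfigVM_Task/PowerOnVM_Task path used to attach and boot the rescue disk) all follow the same oslo.vmware pattern: invoke a vSphere *_Task method through the session, then poll it with wait_for_task until it reports "completed successfully". Below is a minimal sketch of that call shape, assuming an already-authenticated oslo_vmware.api.VMwareAPISession (`session`), a VirtualMachine managed-object reference (`vm_ref`), a datastore path string and a datacenter reference; it is an illustration of the pattern only, not Nova's vm_util/vmops/ds_util code.

    def destroy_vm(session, vm_ref, ds_path, dc_ref):
        """Illustrative sketch: power off -> unregister -> delete datastore files."""
        # "Invoking VirtualMachine.PowerOffVM_Task ..." followed by task polling
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)
        # "Invoking VirtualMachine.UnregisterVM ..." -- a plain call, no task to poll
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
        # "Invoking FileManager.DeleteDatastoreFile_Task ..." followed by task polling;
        # parameter names (name, datacenter) follow the vSphere SDK
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=str(ds_path), datacenter=dc_ref)
        session.wait_for_task(task)

wait_for_task blocks while the task runs, emitting the "_poll_task ... progress is N%" lines seen throughout this section, and raises if the task ends in error rather than "completed successfully".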
[ 2175.054947] env[62684]: DEBUG nova.network.neutron [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Updating instance_info_cache with network_info: [{"id": "da310d7c-cd12-49ca-8014-efa9469aef45", "address": "fa:16:3e:5a:6f:ea", "network": {"id": "64494ea7-f6d9-430c-8ac7-e876e763004b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2056829508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.164", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e57b232a7e7647c7a3b2bca3c096feb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda310d7c-cd", "ovs_interfaceid": "da310d7c-cd12-49ca-8014-efa9469aef45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2175.067602] env[62684]: DEBUG oslo_concurrency.lockutils [req-84cfbafa-1ff4-47fe-90e1-4259cd4dbf89 req-ee7422c4-7a39-4bac-b6f7-3fc6a430c6df service nova] Releasing lock "refresh_cache-584845d2-d146-42bf-8ef5-58532fe24f65" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2175.067602] env[62684]: DEBUG nova.compute.manager [req-84cfbafa-1ff4-47fe-90e1-4259cd4dbf89 req-ee7422c4-7a39-4bac-b6f7-3fc6a430c6df service nova] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Received event network-vif-deleted-9755599a-bf6c-415f-b6dc-88d5d3774944 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2175.067602] env[62684]: INFO nova.compute.manager [req-84cfbafa-1ff4-47fe-90e1-4259cd4dbf89 req-ee7422c4-7a39-4bac-b6f7-3fc6a430c6df service nova] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Neutron deleted interface 9755599a-bf6c-415f-b6dc-88d5d3774944; detaching it from the instance and deleting it from the info cache [ 2175.067602] env[62684]: DEBUG nova.network.neutron [req-84cfbafa-1ff4-47fe-90e1-4259cd4dbf89 req-ee7422c4-7a39-4bac-b6f7-3fc6a430c6df service nova] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2175.130355] env[62684]: DEBUG nova.network.neutron [req-be6f2bec-551e-4259-a293-30830cb660ab req-b5e3ff45-8b8a-45f8-bd31-2d011c233eaf service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Updated VIF entry in instance network info cache for port b5747949-00d7-4815-9080-52285a6a8813. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2175.130634] env[62684]: DEBUG nova.network.neutron [req-be6f2bec-551e-4259-a293-30830cb660ab req-b5e3ff45-8b8a-45f8-bd31-2d011c233eaf service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Updating instance_info_cache with network_info: [{"id": "b5747949-00d7-4815-9080-52285a6a8813", "address": "fa:16:3e:fd:34:0c", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5747949-00", "ovs_interfaceid": "b5747949-00d7-4815-9080-52285a6a8813", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2175.146722] env[62684]: DEBUG oslo_concurrency.lockutils [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "refresh_cache-99a9653c-7221-4495-be5f-5441dc8da0f4" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2175.146980] env[62684]: DEBUG oslo_concurrency.lockutils [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquired lock "refresh_cache-99a9653c-7221-4495-be5f-5441dc8da0f4" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2175.147230] env[62684]: DEBUG nova.network.neutron [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2175.209692] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b18586d5-1630-414a-b407-d0d4db62dc0f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "0a8d7c48-cf90-4baf-a900-38fbd62869a6" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 11.385s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2175.224527] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2175.224527] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2175.224603] env[62684]: DEBUG nova.objects.instance [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Lazy-loading 'resources' on Instance uuid 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2175.292762] env[62684]: INFO nova.compute.manager [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Took 0.56 seconds to detach 1 volumes for instance. [ 2175.449292] env[62684]: INFO nova.compute.manager [-] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Took 2.12 seconds to deallocate network for instance. [ 2175.497229] env[62684]: DEBUG oslo_concurrency.lockutils [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2175.505856] env[62684]: DEBUG nova.network.neutron [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2175.558116] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Releasing lock "refresh_cache-daf1486b-d5c2-4341-8a27-36eeeb08cd26" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2175.570559] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6d816d20-8671-4dce-a057-accf6d0c102a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.580396] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fc8e844-9eef-4745-9788-ad36a1445562 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.603946] env[62684]: DEBUG nova.virt.hardware [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='ce16b52c92f2c28e57b47544e7315286',container_format='bare',created_at=2025-01-10T07:53:01Z,direct_url=,disk_format='vmdk',id=b5ede0c6-ad0d-4c75-b005-a332dfdc71df,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-794753430-shelved',owner='e57b232a7e7647c7a3b2bca3c096feb7',properties=ImageMetaProps,protected=,size=31661056,status='active',tags=,updated_at=2025-01-10T07:53:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2175.604237] env[62684]: DEBUG nova.virt.hardware [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2175.604401] env[62684]: DEBUG nova.virt.hardware [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2175.604588] env[62684]: DEBUG nova.virt.hardware [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2175.604897] env[62684]: DEBUG nova.virt.hardware [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2175.605186] env[62684]: DEBUG nova.virt.hardware [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 
tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2175.605402] env[62684]: DEBUG nova.virt.hardware [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2175.605528] env[62684]: DEBUG nova.virt.hardware [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2175.605750] env[62684]: DEBUG nova.virt.hardware [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2175.605873] env[62684]: DEBUG nova.virt.hardware [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2175.606066] env[62684]: DEBUG nova.virt.hardware [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2175.610955] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a93f8d2c-c47f-42c3-a0bc-ff20667982cc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.621303] env[62684]: DEBUG nova.compute.manager [req-84cfbafa-1ff4-47fe-90e1-4259cd4dbf89 req-ee7422c4-7a39-4bac-b6f7-3fc6a430c6df service nova] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Detach interface failed, port_id=9755599a-bf6c-415f-b6dc-88d5d3774944, reason: Instance 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd could not be found. 
{{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2175.621543] env[62684]: DEBUG nova.compute.manager [req-84cfbafa-1ff4-47fe-90e1-4259cd4dbf89 req-ee7422c4-7a39-4bac-b6f7-3fc6a430c6df service nova] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Received event network-vif-deleted-ca16d302-c6d1-48a0-ac08-8031db433cc7 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2175.621731] env[62684]: INFO nova.compute.manager [req-84cfbafa-1ff4-47fe-90e1-4259cd4dbf89 req-ee7422c4-7a39-4bac-b6f7-3fc6a430c6df service nova] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Neutron deleted interface ca16d302-c6d1-48a0-ac08-8031db433cc7; detaching it from the instance and deleting it from the info cache [ 2175.621896] env[62684]: DEBUG nova.network.neutron [req-84cfbafa-1ff4-47fe-90e1-4259cd4dbf89 req-ee7422c4-7a39-4bac-b6f7-3fc6a430c6df service nova] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2175.630751] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f3cb86-afc5-4caf-9570-614dfffb3d72 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.635883] env[62684]: DEBUG oslo_concurrency.lockutils [req-be6f2bec-551e-4259-a293-30830cb660ab req-b5e3ff45-8b8a-45f8-bd31-2d011c233eaf service nova] Releasing lock "refresh_cache-f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2175.635883] env[62684]: DEBUG nova.compute.manager [req-be6f2bec-551e-4259-a293-30830cb660ab req-b5e3ff45-8b8a-45f8-bd31-2d011c233eaf service nova] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Received event network-vif-deleted-4be5b16b-2fb8-4e61-a763-f521de30fe81 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2175.636028] env[62684]: INFO nova.compute.manager [req-be6f2bec-551e-4259-a293-30830cb660ab req-b5e3ff45-8b8a-45f8-bd31-2d011c233eaf service nova] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Neutron deleted interface 4be5b16b-2fb8-4e61-a763-f521de30fe81; detaching it from the instance and deleting it from the info cache [ 2175.636248] env[62684]: DEBUG nova.network.neutron [req-be6f2bec-551e-4259-a293-30830cb660ab req-b5e3ff45-8b8a-45f8-bd31-2d011c233eaf service nova] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2175.649839] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5a:6f:ea', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6966f473-59ac-49bb-9b7a-22c61f4e61e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'da310d7c-cd12-49ca-8014-efa9469aef45', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2175.658366] env[62684]: DEBUG oslo.service.loopingcall [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 
tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2175.660117] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2175.660599] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e435bf7e-3f2e-4f45-bf20-61168e00dd02 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.685500] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2175.685500] env[62684]: value = "task-2053485" [ 2175.685500] env[62684]: _type = "Task" [ 2175.685500] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2175.694278] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053485, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2175.717481] env[62684]: DEBUG nova.network.neutron [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Updating instance_info_cache with network_info: [{"id": "89a358de-54fa-41bb-ae43-85a7abbb900b", "address": "fa:16:3e:29:1c:3d", "network": {"id": "aa52badb-0b73-48bc-afaa-5e06a97d5c7d", "bridge": "br-int", "label": "tempest-ServersTestJSON-556342067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c54f74085f343d2b790145b0d82a9f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89a358de-54", "ovs_interfaceid": "89a358de-54fa-41bb-ae43-85a7abbb900b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2175.720567] env[62684]: DEBUG nova.network.neutron [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2175.803315] env[62684]: DEBUG oslo_concurrency.lockutils [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2175.955726] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2176.011802] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8958a0b2-e91b-4f95-b3da-7a07dc667959 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.019868] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f671a20-62bc-4146-a066-28f1414b130b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.057999] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65e9657d-e9b2-4c17-a506-7e5fc235ecab {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.069531] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e319553b-20ea-4ddc-befd-52781e75d2cb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.085314] env[62684]: DEBUG nova.compute.provider_tree [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2176.127019] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2634cf29-e746-41d1-be8c-6555a1be495e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.138021] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6217bdd-c42f-483a-8ded-4060c861a390 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.150845] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-26281ffe-d6b2-48db-aefe-e4df8f8a6ec6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.165022] env[62684]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f1ca22e-b4c8-499c-833d-ea34ad16fd51 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.188380] env[62684]: DEBUG nova.compute.manager [req-84cfbafa-1ff4-47fe-90e1-4259cd4dbf89 req-ee7422c4-7a39-4bac-b6f7-3fc6a430c6df service nova] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Detach interface failed, port_id=ca16d302-c6d1-48a0-ac08-8031db433cc7, reason: Instance aebbc2cc-8973-4907-9ec8-085027fd7ca3 could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2176.189028] env[62684]: DEBUG nova.compute.manager [req-84cfbafa-1ff4-47fe-90e1-4259cd4dbf89 req-ee7422c4-7a39-4bac-b6f7-3fc6a430c6df service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Received event network-vif-plugged-da310d7c-cd12-49ca-8014-efa9469aef45 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2176.189028] env[62684]: DEBUG oslo_concurrency.lockutils [req-84cfbafa-1ff4-47fe-90e1-4259cd4dbf89 req-ee7422c4-7a39-4bac-b6f7-3fc6a430c6df service nova] Acquiring lock "daf1486b-d5c2-4341-8a27-36eeeb08cd26-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2176.189297] env[62684]: DEBUG oslo_concurrency.lockutils [req-84cfbafa-1ff4-47fe-90e1-4259cd4dbf89 req-ee7422c4-7a39-4bac-b6f7-3fc6a430c6df service nova] Lock "daf1486b-d5c2-4341-8a27-36eeeb08cd26-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2176.189523] env[62684]: DEBUG oslo_concurrency.lockutils [req-84cfbafa-1ff4-47fe-90e1-4259cd4dbf89 req-ee7422c4-7a39-4bac-b6f7-3fc6a430c6df service nova] Lock "daf1486b-d5c2-4341-8a27-36eeeb08cd26-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2176.189782] env[62684]: DEBUG nova.compute.manager [req-84cfbafa-1ff4-47fe-90e1-4259cd4dbf89 req-ee7422c4-7a39-4bac-b6f7-3fc6a430c6df service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] No waiting events found dispatching network-vif-plugged-da310d7c-cd12-49ca-8014-efa9469aef45 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2176.190565] env[62684]: WARNING nova.compute.manager [req-84cfbafa-1ff4-47fe-90e1-4259cd4dbf89 req-ee7422c4-7a39-4bac-b6f7-3fc6a430c6df service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Received unexpected event network-vif-plugged-da310d7c-cd12-49ca-8014-efa9469aef45 for instance with vm_state shelved_offloaded and task_state spawning. [ 2176.200024] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053485, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2176.214080] env[62684]: DEBUG nova.compute.manager [req-be6f2bec-551e-4259-a293-30830cb660ab req-b5e3ff45-8b8a-45f8-bd31-2d011c233eaf service nova] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Detach interface failed, port_id=4be5b16b-2fb8-4e61-a763-f521de30fe81, reason: Instance 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5 could not be found. 
{{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2176.220017] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Releasing lock "refresh_cache-f4fab142-8066-43c1-abaa-a9f66775114c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2176.220311] env[62684]: DEBUG nova.compute.manager [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Instance network_info: |[{"id": "89a358de-54fa-41bb-ae43-85a7abbb900b", "address": "fa:16:3e:29:1c:3d", "network": {"id": "aa52badb-0b73-48bc-afaa-5e06a97d5c7d", "bridge": "br-int", "label": "tempest-ServersTestJSON-556342067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c54f74085f343d2b790145b0d82a9f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89a358de-54", "ovs_interfaceid": "89a358de-54fa-41bb-ae43-85a7abbb900b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2176.220738] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:1c:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1fb81f98-6f5a-47ab-a512-27277591d064', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89a358de-54fa-41bb-ae43-85a7abbb900b', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2176.230441] env[62684]: DEBUG oslo.service.loopingcall [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2176.230441] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2176.230441] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-490a481c-34ce-4368-a949-a53de182b945 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.249613] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2176.249613] env[62684]: value = "task-2053486" [ 2176.249613] env[62684]: _type = "Task" [ 2176.249613] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2176.260472] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053486, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2176.280697] env[62684]: DEBUG nova.network.neutron [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Updating instance_info_cache with network_info: [{"id": "fe223d4f-0585-454f-b724-0cdff1d2ceea", "address": "fa:16:3e:9a:9c:00", "network": {"id": "e177c6d0-ddd5-4029-94af-c8f1b937dd9f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1344612161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27d04006afc747e19ad87238bfdbaad1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe223d4f-05", "ovs_interfaceid": "fe223d4f-0585-454f-b724-0cdff1d2ceea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2176.371348] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "0a8d7c48-cf90-4baf-a900-38fbd62869a6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2176.371639] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "0a8d7c48-cf90-4baf-a900-38fbd62869a6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2176.372292] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "0a8d7c48-cf90-4baf-a900-38fbd62869a6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2176.372504] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "0a8d7c48-cf90-4baf-a900-38fbd62869a6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2176.372701] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "0a8d7c48-cf90-4baf-a900-38fbd62869a6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2176.374875] env[62684]: INFO nova.compute.manager [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Terminating instance [ 2176.376563] env[62684]: DEBUG nova.compute.manager [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2176.376764] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2176.377599] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce546f25-aa8a-49e6-b903-ad921b4a300b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.385060] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2176.385310] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cb01fac5-8408-4937-a8f7-faf46198a176 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.392440] env[62684]: DEBUG oslo_vmware.api [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2176.392440] env[62684]: value = "task-2053487" [ 2176.392440] env[62684]: _type = "Task" [ 2176.392440] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2176.400446] env[62684]: DEBUG oslo_vmware.api [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053487, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2176.548160] env[62684]: DEBUG nova.compute.manager [req-2f80b517-c0fe-49ab-a784-46836bfde8ec req-7e77f9ea-9b40-4821-b1f0-19ba843e29d9 service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Received event network-changed-da310d7c-cd12-49ca-8014-efa9469aef45 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2176.548160] env[62684]: DEBUG nova.compute.manager [req-2f80b517-c0fe-49ab-a784-46836bfde8ec req-7e77f9ea-9b40-4821-b1f0-19ba843e29d9 service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Refreshing instance network info cache due to event network-changed-da310d7c-cd12-49ca-8014-efa9469aef45. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2176.548160] env[62684]: DEBUG oslo_concurrency.lockutils [req-2f80b517-c0fe-49ab-a784-46836bfde8ec req-7e77f9ea-9b40-4821-b1f0-19ba843e29d9 service nova] Acquiring lock "refresh_cache-daf1486b-d5c2-4341-8a27-36eeeb08cd26" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2176.548160] env[62684]: DEBUG oslo_concurrency.lockutils [req-2f80b517-c0fe-49ab-a784-46836bfde8ec req-7e77f9ea-9b40-4821-b1f0-19ba843e29d9 service nova] Acquired lock "refresh_cache-daf1486b-d5c2-4341-8a27-36eeeb08cd26" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2176.548788] env[62684]: DEBUG nova.network.neutron [req-2f80b517-c0fe-49ab-a784-46836bfde8ec req-7e77f9ea-9b40-4821-b1f0-19ba843e29d9 service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Refreshing network info cache for port da310d7c-cd12-49ca-8014-efa9469aef45 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2176.572485] env[62684]: DEBUG nova.compute.manager [req-1f0a8a1b-1813-4e2b-a352-1545050024f6 req-6d38270b-5a56-4edc-8258-8349e6d8b850 service nova] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Received event network-vif-deleted-77dcd033-4115-49dc-9ba1-8a05c4726df3 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2176.572721] env[62684]: DEBUG nova.compute.manager [req-1f0a8a1b-1813-4e2b-a352-1545050024f6 req-6d38270b-5a56-4edc-8258-8349e6d8b850 service nova] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Received event network-changed-89a358de-54fa-41bb-ae43-85a7abbb900b {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2176.575205] env[62684]: DEBUG nova.compute.manager [req-1f0a8a1b-1813-4e2b-a352-1545050024f6 req-6d38270b-5a56-4edc-8258-8349e6d8b850 service nova] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Refreshing instance network info cache due to event network-changed-89a358de-54fa-41bb-ae43-85a7abbb900b. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2176.575205] env[62684]: DEBUG oslo_concurrency.lockutils [req-1f0a8a1b-1813-4e2b-a352-1545050024f6 req-6d38270b-5a56-4edc-8258-8349e6d8b850 service nova] Acquiring lock "refresh_cache-f4fab142-8066-43c1-abaa-a9f66775114c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2176.575205] env[62684]: DEBUG oslo_concurrency.lockutils [req-1f0a8a1b-1813-4e2b-a352-1545050024f6 req-6d38270b-5a56-4edc-8258-8349e6d8b850 service nova] Acquired lock "refresh_cache-f4fab142-8066-43c1-abaa-a9f66775114c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2176.575205] env[62684]: DEBUG nova.network.neutron [req-1f0a8a1b-1813-4e2b-a352-1545050024f6 req-6d38270b-5a56-4edc-8258-8349e6d8b850 service nova] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Refreshing network info cache for port 89a358de-54fa-41bb-ae43-85a7abbb900b {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2176.610500] env[62684]: ERROR nova.scheduler.client.report [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [req-3a1699e1-f7b7-40fd-b609-609f7a7be681] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3a1699e1-f7b7-40fd-b609-609f7a7be681"}]} [ 2176.627934] env[62684]: DEBUG nova.scheduler.client.report [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2176.643772] env[62684]: DEBUG nova.scheduler.client.report [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2176.644084] env[62684]: DEBUG nova.compute.provider_tree [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2176.657880] env[62684]: DEBUG nova.scheduler.client.report [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2176.676465] env[62684]: DEBUG nova.scheduler.client.report [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2176.703102] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053485, 'name': CreateVM_Task, 'duration_secs': 0.584464} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2176.703102] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2176.703661] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b5ede0c6-ad0d-4c75-b005-a332dfdc71df" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2176.704031] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b5ede0c6-ad0d-4c75-b005-a332dfdc71df" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2176.704860] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b5ede0c6-ad0d-4c75-b005-a332dfdc71df" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2176.708473] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9b31dec-6d8f-41b0-ae14-c9410d334c79 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.715024] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2176.715024] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ed4723-ba2e-ec7d-c7c3-9dbce2848d13" [ 2176.715024] env[62684]: _type = "Task" [ 2176.715024] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2176.728020] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ed4723-ba2e-ec7d-c7c3-9dbce2848d13, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2176.762478] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053486, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2176.783585] env[62684]: DEBUG oslo_concurrency.lockutils [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Releasing lock "refresh_cache-99a9653c-7221-4495-be5f-5441dc8da0f4" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2176.783965] env[62684]: DEBUG nova.compute.manager [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Instance network_info: |[{"id": "fe223d4f-0585-454f-b724-0cdff1d2ceea", "address": "fa:16:3e:9a:9c:00", "network": {"id": "e177c6d0-ddd5-4029-94af-c8f1b937dd9f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1344612161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27d04006afc747e19ad87238bfdbaad1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe223d4f-05", "ovs_interfaceid": "fe223d4f-0585-454f-b724-0cdff1d2ceea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2176.787033] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:9c:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '171aeae0-6a27-44fc-bc3d-a2d5581fc702', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fe223d4f-0585-454f-b724-0cdff1d2ceea', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2176.794520] env[62684]: DEBUG oslo.service.loopingcall [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2176.800187] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2176.801058] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-393c76d3-1b13-4162-a6a4-70bd2c07d351 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.828884] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2176.828884] env[62684]: value = "task-2053488" [ 2176.828884] env[62684]: _type = "Task" [ 2176.828884] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2176.842881] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053488, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2176.907378] env[62684]: DEBUG oslo_vmware.api [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053487, 'name': PowerOffVM_Task, 'duration_secs': 0.330352} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2176.907600] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2176.907772] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2176.908048] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-44353f88-a01d-4619-8a54-fe67d6526455 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.978688] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4480131f-ab1c-456f-ad1f-af750871358d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.986328] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b516c318-40d2-4b06-b22b-c60687512c90 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.016486] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82730ee9-d9f8-4c53-8247-1db09f5ee157 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.024319] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5378777d-be77-44cb-99c1-7b0562911416 {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.038051] env[62684]: DEBUG nova.compute.provider_tree [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2177.069208] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2177.069496] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2177.069678] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Deleting the datastore file [datastore1] 0a8d7c48-cf90-4baf-a900-38fbd62869a6 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2177.069976] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7a2300de-a818-4423-9266-2821a3f6aea8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.079593] env[62684]: DEBUG oslo_vmware.api [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2177.079593] env[62684]: value = "task-2053490" [ 2177.079593] env[62684]: _type = "Task" [ 2177.079593] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2177.091579] env[62684]: DEBUG oslo_vmware.api [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053490, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2177.134193] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "264c6900-dbef-455e-95cc-1df73c735cc8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2177.134526] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "264c6900-dbef-455e-95cc-1df73c735cc8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2177.225962] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b5ede0c6-ad0d-4c75-b005-a332dfdc71df" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2177.226372] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Processing image b5ede0c6-ad0d-4c75-b005-a332dfdc71df {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2177.226680] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b5ede0c6-ad0d-4c75-b005-a332dfdc71df/b5ede0c6-ad0d-4c75-b005-a332dfdc71df.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2177.226793] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b5ede0c6-ad0d-4c75-b005-a332dfdc71df/b5ede0c6-ad0d-4c75-b005-a332dfdc71df.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2177.227013] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2177.227278] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-32b3b980-d7ce-4eeb-9a5c-9a701c24eda4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.236612] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 
tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2177.236804] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2177.237551] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a41a8c56-e449-4b99-aa23-56c5d1fd210a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.242623] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2177.242623] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520f1021-925c-5f5f-642d-0b33467a61f3" [ 2177.242623] env[62684]: _type = "Task" [ 2177.242623] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2177.251528] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520f1021-925c-5f5f-642d-0b33467a61f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2177.260073] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053486, 'name': CreateVM_Task, 'duration_secs': 0.684872} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2177.262819] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2177.263869] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2177.263869] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2177.264030] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2177.264624] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b0af98a-b045-432a-a5fb-63613b28bdd8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.271036] env[62684]: DEBUG oslo_vmware.api [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2177.271036] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f80924-082f-1986-a4bf-3dcbf9928ca9" [ 2177.271036] env[62684]: _type = "Task" [ 2177.271036] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2177.285210] env[62684]: DEBUG oslo_vmware.api [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f80924-082f-1986-a4bf-3dcbf9928ca9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2177.301596] env[62684]: INFO nova.compute.manager [None req-775b639c-e2d0-4d46-b17d-122275e75251 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Unrescuing [ 2177.301965] env[62684]: DEBUG oslo_concurrency.lockutils [None req-775b639c-e2d0-4d46-b17d-122275e75251 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "refresh_cache-2baabe7a-ed33-4cef-9acc-a7b804610b0a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2177.302301] env[62684]: DEBUG oslo_concurrency.lockutils [None req-775b639c-e2d0-4d46-b17d-122275e75251 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquired lock "refresh_cache-2baabe7a-ed33-4cef-9acc-a7b804610b0a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2177.302541] env[62684]: DEBUG nova.network.neutron [None req-775b639c-e2d0-4d46-b17d-122275e75251 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2177.340852] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053488, 'name': CreateVM_Task, 'duration_secs': 0.4181} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2177.340852] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2177.341491] env[62684]: DEBUG oslo_concurrency.lockutils [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2177.373107] env[62684]: DEBUG nova.network.neutron [req-1f0a8a1b-1813-4e2b-a352-1545050024f6 req-6d38270b-5a56-4edc-8258-8349e6d8b850 service nova] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Updated VIF entry in instance network info cache for port 89a358de-54fa-41bb-ae43-85a7abbb900b. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2177.373516] env[62684]: DEBUG nova.network.neutron [req-1f0a8a1b-1813-4e2b-a352-1545050024f6 req-6d38270b-5a56-4edc-8258-8349e6d8b850 service nova] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Updating instance_info_cache with network_info: [{"id": "89a358de-54fa-41bb-ae43-85a7abbb900b", "address": "fa:16:3e:29:1c:3d", "network": {"id": "aa52badb-0b73-48bc-afaa-5e06a97d5c7d", "bridge": "br-int", "label": "tempest-ServersTestJSON-556342067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c54f74085f343d2b790145b0d82a9f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89a358de-54", "ovs_interfaceid": "89a358de-54fa-41bb-ae43-85a7abbb900b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2177.466447] env[62684]: DEBUG nova.network.neutron [req-2f80b517-c0fe-49ab-a784-46836bfde8ec req-7e77f9ea-9b40-4821-b1f0-19ba843e29d9 service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Updated VIF entry in instance network info cache for port da310d7c-cd12-49ca-8014-efa9469aef45. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2177.466861] env[62684]: DEBUG nova.network.neutron [req-2f80b517-c0fe-49ab-a784-46836bfde8ec req-7e77f9ea-9b40-4821-b1f0-19ba843e29d9 service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Updating instance_info_cache with network_info: [{"id": "da310d7c-cd12-49ca-8014-efa9469aef45", "address": "fa:16:3e:5a:6f:ea", "network": {"id": "64494ea7-f6d9-430c-8ac7-e876e763004b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2056829508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.164", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e57b232a7e7647c7a3b2bca3c096feb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda310d7c-cd", "ovs_interfaceid": "da310d7c-cd12-49ca-8014-efa9469aef45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2177.559948] env[62684]: ERROR nova.scheduler.client.report [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [req-08ba12ee-2ae0-455b-8f5a-5d84566d41c8] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-08ba12ee-2ae0-455b-8f5a-5d84566d41c8"}]} [ 2177.576290] env[62684]: DEBUG nova.scheduler.client.report [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2177.589775] env[62684]: DEBUG oslo_vmware.api [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053490, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173863} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2177.590294] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2177.590537] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2177.590724] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2177.590908] env[62684]: INFO nova.compute.manager [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Took 1.21 seconds to destroy the instance on the hypervisor. [ 2177.591167] env[62684]: DEBUG oslo.service.loopingcall [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2177.591373] env[62684]: DEBUG nova.compute.manager [-] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2177.591467] env[62684]: DEBUG nova.network.neutron [-] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2177.593658] env[62684]: DEBUG nova.scheduler.client.report [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2177.593982] env[62684]: DEBUG nova.compute.provider_tree [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2177.608304] env[62684]: DEBUG nova.scheduler.client.report [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2177.631613] env[62684]: DEBUG nova.scheduler.client.report [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2177.637272] env[62684]: DEBUG nova.compute.manager [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2177.760403] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Preparing fetch location {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2177.760710] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Fetch image to [datastore2] OSTACK_IMG_096271ae-590d-4396-ad3d-1c6f8160d4fa/OSTACK_IMG_096271ae-590d-4396-ad3d-1c6f8160d4fa.vmdk {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2177.760924] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Downloading stream optimized image b5ede0c6-ad0d-4c75-b005-a332dfdc71df to [datastore2] OSTACK_IMG_096271ae-590d-4396-ad3d-1c6f8160d4fa/OSTACK_IMG_096271ae-590d-4396-ad3d-1c6f8160d4fa.vmdk on the data store datastore2 as vApp {{(pid=62684) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2177.761508] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Downloading image file data b5ede0c6-ad0d-4c75-b005-a332dfdc71df to the ESX as VM named 'OSTACK_IMG_096271ae-590d-4396-ad3d-1c6f8160d4fa' {{(pid=62684) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2177.787306] env[62684]: DEBUG oslo_vmware.api [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f80924-082f-1986-a4bf-3dcbf9928ca9, 'name': SearchDatastore_Task, 'duration_secs': 0.02087} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2177.790144] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2177.790989] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2177.791888] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2177.792994] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2177.793069] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2177.793609] env[62684]: DEBUG oslo_concurrency.lockutils [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2177.794209] env[62684]: DEBUG oslo_concurrency.lockutils [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2177.794522] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1080b1a9-d571-4479-816b-3605dae30a27 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.799130] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d15e99d-5889-45ac-bec4-13d14f1580dc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.823738] env[62684]: DEBUG oslo_vmware.api [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 
tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2177.823738] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52085847-e483-6dff-7412-1f434cd35ce9" [ 2177.823738] env[62684]: _type = "Task" [ 2177.823738] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2177.828397] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2177.828668] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2177.829774] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70529a90-f1fa-47e2-876e-4c6669bc18ab {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.837525] env[62684]: DEBUG oslo_vmware.api [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52085847-e483-6dff-7412-1f434cd35ce9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2177.841395] env[62684]: DEBUG oslo_vmware.api [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2177.841395] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a8ed91-d19c-d4b0-f85b-2ee8a0521783" [ 2177.841395] env[62684]: _type = "Task" [ 2177.841395] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2177.858717] env[62684]: DEBUG oslo_vmware.api [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a8ed91-d19c-d4b0-f85b-2ee8a0521783, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2177.878849] env[62684]: DEBUG oslo_concurrency.lockutils [req-1f0a8a1b-1813-4e2b-a352-1545050024f6 req-6d38270b-5a56-4edc-8258-8349e6d8b850 service nova] Releasing lock "refresh_cache-f4fab142-8066-43c1-abaa-a9f66775114c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2177.879501] env[62684]: DEBUG nova.compute.manager [req-1f0a8a1b-1813-4e2b-a352-1545050024f6 req-6d38270b-5a56-4edc-8258-8349e6d8b850 service nova] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Received event network-vif-plugged-fe223d4f-0585-454f-b724-0cdff1d2ceea {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2177.879756] env[62684]: DEBUG oslo_concurrency.lockutils [req-1f0a8a1b-1813-4e2b-a352-1545050024f6 req-6d38270b-5a56-4edc-8258-8349e6d8b850 service nova] Acquiring lock "99a9653c-7221-4495-be5f-5441dc8da0f4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2177.879990] env[62684]: DEBUG oslo_concurrency.lockutils [req-1f0a8a1b-1813-4e2b-a352-1545050024f6 req-6d38270b-5a56-4edc-8258-8349e6d8b850 service nova] Lock "99a9653c-7221-4495-be5f-5441dc8da0f4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2177.880205] env[62684]: DEBUG oslo_concurrency.lockutils [req-1f0a8a1b-1813-4e2b-a352-1545050024f6 req-6d38270b-5a56-4edc-8258-8349e6d8b850 service nova] Lock "99a9653c-7221-4495-be5f-5441dc8da0f4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2177.880389] env[62684]: DEBUG nova.compute.manager [req-1f0a8a1b-1813-4e2b-a352-1545050024f6 req-6d38270b-5a56-4edc-8258-8349e6d8b850 service nova] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] No waiting events found dispatching network-vif-plugged-fe223d4f-0585-454f-b724-0cdff1d2ceea {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2177.880567] env[62684]: WARNING nova.compute.manager [req-1f0a8a1b-1813-4e2b-a352-1545050024f6 req-6d38270b-5a56-4edc-8258-8349e6d8b850 service nova] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Received unexpected event network-vif-plugged-fe223d4f-0585-454f-b724-0cdff1d2ceea for instance with vm_state building and task_state spawning. [ 2177.880752] env[62684]: DEBUG nova.compute.manager [req-1f0a8a1b-1813-4e2b-a352-1545050024f6 req-6d38270b-5a56-4edc-8258-8349e6d8b850 service nova] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Received event network-changed-fe223d4f-0585-454f-b724-0cdff1d2ceea {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2177.880924] env[62684]: DEBUG nova.compute.manager [req-1f0a8a1b-1813-4e2b-a352-1545050024f6 req-6d38270b-5a56-4edc-8258-8349e6d8b850 service nova] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Refreshing instance network info cache due to event network-changed-fe223d4f-0585-454f-b724-0cdff1d2ceea. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2177.881173] env[62684]: DEBUG oslo_concurrency.lockutils [req-1f0a8a1b-1813-4e2b-a352-1545050024f6 req-6d38270b-5a56-4edc-8258-8349e6d8b850 service nova] Acquiring lock "refresh_cache-99a9653c-7221-4495-be5f-5441dc8da0f4" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2177.881322] env[62684]: DEBUG oslo_concurrency.lockutils [req-1f0a8a1b-1813-4e2b-a352-1545050024f6 req-6d38270b-5a56-4edc-8258-8349e6d8b850 service nova] Acquired lock "refresh_cache-99a9653c-7221-4495-be5f-5441dc8da0f4" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2177.881570] env[62684]: DEBUG nova.network.neutron [req-1f0a8a1b-1813-4e2b-a352-1545050024f6 req-6d38270b-5a56-4edc-8258-8349e6d8b850 service nova] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Refreshing network info cache for port fe223d4f-0585-454f-b724-0cdff1d2ceea {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2177.888861] env[62684]: DEBUG oslo_vmware.rw_handles [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2177.888861] env[62684]: value = "resgroup-9" [ 2177.888861] env[62684]: _type = "ResourcePool" [ 2177.888861] env[62684]: }. {{(pid=62684) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2177.889169] env[62684]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-ed8f140f-49c8-4413-b4db-6c3dfa2721a7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.914514] env[62684]: DEBUG oslo_vmware.rw_handles [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lease: (returnval){ [ 2177.914514] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52314ec2-eeb4-28ae-545c-f1be55e46f6d" [ 2177.914514] env[62684]: _type = "HttpNfcLease" [ 2177.914514] env[62684]: } obtained for vApp import into resource pool (val){ [ 2177.914514] env[62684]: value = "resgroup-9" [ 2177.914514] env[62684]: _type = "ResourcePool" [ 2177.914514] env[62684]: }. {{(pid=62684) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2177.915513] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the lease: (returnval){ [ 2177.915513] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52314ec2-eeb4-28ae-545c-f1be55e46f6d" [ 2177.915513] env[62684]: _type = "HttpNfcLease" [ 2177.915513] env[62684]: } to be ready. {{(pid=62684) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2177.926277] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2177.926277] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52314ec2-eeb4-28ae-545c-f1be55e46f6d" [ 2177.926277] env[62684]: _type = "HttpNfcLease" [ 2177.926277] env[62684]: } is initializing. 
{{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2177.972540] env[62684]: DEBUG oslo_concurrency.lockutils [req-2f80b517-c0fe-49ab-a784-46836bfde8ec req-7e77f9ea-9b40-4821-b1f0-19ba843e29d9 service nova] Releasing lock "refresh_cache-daf1486b-d5c2-4341-8a27-36eeeb08cd26" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2177.994689] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-968fe1f3-fd1f-4aac-898d-ec92a02e8e9d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.004876] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c533f6b-9efa-4b51-bab1-d910dafaced9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.051102] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa65e30-edc7-4ebd-b173-99960eddab24 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.060747] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4434cd78-2baf-4083-9713-e4b7e9529b6c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.084265] env[62684]: DEBUG nova.compute.provider_tree [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2178.131393] env[62684]: DEBUG nova.network.neutron [None req-775b639c-e2d0-4d46-b17d-122275e75251 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Updating instance_info_cache with network_info: [{"id": "3fcb3920-5e10-45e2-865d-cc9b89a1e335", "address": "fa:16:3e:2f:71:d6", "network": {"id": "bd253713-4e81-4c94-9689-22b81e7f51b6", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-307001665-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd812751722143fabedfa986a2d98b59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap3fcb3920-5e", "ovs_interfaceid": "3fcb3920-5e10-45e2-865d-cc9b89a1e335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2178.162469] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2178.340687] env[62684]: DEBUG oslo_vmware.api [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52085847-e483-6dff-7412-1f434cd35ce9, 'name': SearchDatastore_Task, 'duration_secs': 0.012748} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2178.341727] env[62684]: DEBUG oslo_concurrency.lockutils [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2178.342176] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2178.342883] env[62684]: DEBUG oslo_concurrency.lockutils [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2178.353546] env[62684]: DEBUG oslo_vmware.api [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a8ed91-d19c-d4b0-f85b-2ee8a0521783, 'name': SearchDatastore_Task, 'duration_secs': 0.016358} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2178.354354] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a70b07a-f74d-4093-ac8b-22b0f9162289 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.363082] env[62684]: DEBUG oslo_vmware.api [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2178.363082] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523e84e7-b8d4-1730-a33f-6c6a9ab86505" [ 2178.363082] env[62684]: _type = "Task" [ 2178.363082] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2178.374157] env[62684]: DEBUG oslo_vmware.api [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523e84e7-b8d4-1730-a33f-6c6a9ab86505, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2178.425178] env[62684]: DEBUG nova.network.neutron [-] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2178.426471] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2178.426471] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52314ec2-eeb4-28ae-545c-f1be55e46f6d" [ 2178.426471] env[62684]: _type = "HttpNfcLease" [ 2178.426471] env[62684]: } is initializing. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2178.591229] env[62684]: DEBUG nova.network.neutron [req-1f0a8a1b-1813-4e2b-a352-1545050024f6 req-6d38270b-5a56-4edc-8258-8349e6d8b850 service nova] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Updated VIF entry in instance network info cache for port fe223d4f-0585-454f-b724-0cdff1d2ceea. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2178.591622] env[62684]: DEBUG nova.network.neutron [req-1f0a8a1b-1813-4e2b-a352-1545050024f6 req-6d38270b-5a56-4edc-8258-8349e6d8b850 service nova] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Updating instance_info_cache with network_info: [{"id": "fe223d4f-0585-454f-b724-0cdff1d2ceea", "address": "fa:16:3e:9a:9c:00", "network": {"id": "e177c6d0-ddd5-4029-94af-c8f1b937dd9f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1344612161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27d04006afc747e19ad87238bfdbaad1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe223d4f-05", "ovs_interfaceid": "fe223d4f-0585-454f-b724-0cdff1d2ceea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2178.599040] env[62684]: DEBUG nova.compute.manager [req-f2fff1bc-1a8d-4240-83de-0733550cfb75 req-6fb7132d-84e2-49ff-abf7-33c250ed801d service nova] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Received event network-vif-deleted-f121aad3-8e11-4583-8919-c502deebb5e2 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2178.605058] env[62684]: ERROR nova.scheduler.client.report [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [req-628a4c0b-4015-4dff-8154-504616956a09] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-628a4c0b-4015-4dff-8154-504616956a09"}]} [ 2178.620762] env[62684]: DEBUG nova.scheduler.client.report [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2178.632876] env[62684]: DEBUG nova.scheduler.client.report [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2178.633112] env[62684]: DEBUG nova.compute.provider_tree [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2178.635506] env[62684]: DEBUG oslo_concurrency.lockutils [None req-775b639c-e2d0-4d46-b17d-122275e75251 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Releasing lock "refresh_cache-2baabe7a-ed33-4cef-9acc-a7b804610b0a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2178.636141] env[62684]: DEBUG nova.objects.instance [None req-775b639c-e2d0-4d46-b17d-122275e75251 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lazy-loading 'flavor' on Instance uuid 2baabe7a-ed33-4cef-9acc-a7b804610b0a {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2178.646666] env[62684]: DEBUG nova.scheduler.client.report [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2178.663269] env[62684]: DEBUG nova.scheduler.client.report [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Refreshing trait associations for 
resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2178.876187] env[62684]: DEBUG oslo_vmware.api [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523e84e7-b8d4-1730-a33f-6c6a9ab86505, 'name': SearchDatastore_Task, 'duration_secs': 0.017268} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2178.877526] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2178.877526] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] f4fab142-8066-43c1-abaa-a9f66775114c/f4fab142-8066-43c1-abaa-a9f66775114c.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2178.877526] env[62684]: DEBUG oslo_concurrency.lockutils [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2178.877526] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2178.877713] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1ec61602-8989-4baf-9e34-59527dd667da {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.879837] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4bae42a9-e949-4d52-be9d-82d66695c88e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.886525] env[62684]: DEBUG oslo_vmware.api [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2178.886525] env[62684]: value = "task-2053492" [ 2178.886525] env[62684]: _type = "Task" [ 2178.886525] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2178.893284] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2178.893485] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2178.894733] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e1a2692-826b-400d-9b33-a6f9e1586ec6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.900101] env[62684]: DEBUG oslo_vmware.api [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053492, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2178.903119] env[62684]: DEBUG oslo_vmware.api [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2178.903119] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ad533d-5429-298b-8cd3-0dfdc94f9a97" [ 2178.903119] env[62684]: _type = "Task" [ 2178.903119] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2178.907441] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d424bd8-1610-4b49-9450-e2240a2b6c08 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.919434] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "ba12fa9a-10e3-4624-98b5-4ff7365e1940" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2178.919704] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "ba12fa9a-10e3-4624-98b5-4ff7365e1940" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2178.920825] env[62684]: DEBUG oslo_vmware.api [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ad533d-5429-298b-8cd3-0dfdc94f9a97, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2178.925663] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f6c5f0f-36a2-48fc-b0c6-f2f54e1c49c2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.929028] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f16135e3-2e7b-4a94-bd0c-6e77c70043e2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.932029] env[62684]: INFO nova.compute.manager [-] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Took 1.34 seconds to deallocate network for instance. [ 2178.965093] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2178.965093] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52314ec2-eeb4-28ae-545c-f1be55e46f6d" [ 2178.965093] env[62684]: _type = "HttpNfcLease" [ 2178.965093] env[62684]: } is ready. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2178.966789] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b59060fe-cd32-4a28-a1c7-3638087d7074 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.969440] env[62684]: DEBUG oslo_vmware.rw_handles [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2178.969440] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52314ec2-eeb4-28ae-545c-f1be55e46f6d" [ 2178.969440] env[62684]: _type = "HttpNfcLease" [ 2178.969440] env[62684]: }. {{(pid=62684) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2178.970091] env[62684]: DEBUG oslo_vmware.api [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2178.970091] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f62dec-912a-a2cf-0ba6-891efe837f09" [ 2178.970091] env[62684]: _type = "Task" [ 2178.970091] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2178.970771] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-651b295a-b13a-4b19-961e-a60fb261f759 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.980097] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808ce87b-81d9-4358-b8ee-2ddde196edf3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.990377] env[62684]: DEBUG oslo_vmware.api [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f62dec-912a-a2cf-0ba6-891efe837f09, 'name': SearchDatastore_Task, 'duration_secs': 0.012594} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2178.990633] env[62684]: DEBUG oslo_vmware.rw_handles [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d8375c-8b7f-cfae-9c41-ab5b85467dac/disk-0.vmdk from lease info. {{(pid=62684) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2178.990807] env[62684]: DEBUG oslo_vmware.rw_handles [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Creating HTTP connection to write to file with size = 31661056 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d8375c-8b7f-cfae-9c41-ab5b85467dac/disk-0.vmdk. {{(pid=62684) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2178.992467] env[62684]: DEBUG oslo_concurrency.lockutils [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2178.992712] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 99a9653c-7221-4495-be5f-5441dc8da0f4/99a9653c-7221-4495-be5f-5441dc8da0f4.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2178.996238] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a052832a-cec3-4b8e-8066-71dbce3f04eb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.055328] env[62684]: DEBUG nova.compute.provider_tree [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2179.062138] env[62684]: DEBUG oslo_vmware.api [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2179.062138] env[62684]: value = "task-2053493" [ 2179.062138] env[62684]: _type = "Task" [ 2179.062138] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2179.069870] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-97496369-c3a2-4693-98a1-93d2ea3f62da {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.082384] env[62684]: DEBUG oslo_vmware.api [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053493, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2179.095089] env[62684]: DEBUG oslo_concurrency.lockutils [req-1f0a8a1b-1813-4e2b-a352-1545050024f6 req-6d38270b-5a56-4edc-8258-8349e6d8b850 service nova] Releasing lock "refresh_cache-99a9653c-7221-4495-be5f-5441dc8da0f4" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2179.142475] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a5c4ac-f373-409b-897d-f4d9aa911f95 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.174713] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-775b639c-e2d0-4d46-b17d-122275e75251 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2179.175165] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-54d7ab06-eeb1-474b-aca9-63eb6aa588b8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.185364] env[62684]: DEBUG oslo_vmware.api [None req-775b639c-e2d0-4d46-b17d-122275e75251 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 2179.185364] env[62684]: value = "task-2053494" [ 2179.185364] env[62684]: _type = "Task" [ 2179.185364] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2179.196060] env[62684]: DEBUG oslo_vmware.api [None req-775b639c-e2d0-4d46-b17d-122275e75251 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053494, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2179.397636] env[62684]: DEBUG oslo_vmware.api [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053492, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.500835} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2179.397811] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] f4fab142-8066-43c1-abaa-a9f66775114c/f4fab142-8066-43c1-abaa-a9f66775114c.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2179.398051] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2179.398326] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e4edd484-97d8-47f2-a367-58f20ec4c2fd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.404923] env[62684]: DEBUG oslo_vmware.api [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2179.404923] env[62684]: value = "task-2053495" [ 2179.404923] env[62684]: _type = "Task" [ 2179.404923] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2179.413119] env[62684]: DEBUG oslo_vmware.api [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053495, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2179.423478] env[62684]: DEBUG nova.compute.manager [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2179.463778] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2179.575835] env[62684]: DEBUG oslo_vmware.api [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053493, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2179.590814] env[62684]: DEBUG nova.scheduler.client.report [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 144 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2179.591161] env[62684]: DEBUG nova.compute.provider_tree [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 144 to 145 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2179.591447] env[62684]: DEBUG nova.compute.provider_tree [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2179.697987] env[62684]: DEBUG oslo_vmware.api [None req-775b639c-e2d0-4d46-b17d-122275e75251 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053494, 'name': PowerOffVM_Task, 'duration_secs': 0.29465} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2179.698370] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-775b639c-e2d0-4d46-b17d-122275e75251 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2179.704539] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-775b639c-e2d0-4d46-b17d-122275e75251 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Reconfiguring VM instance instance-0000003f to detach disk 2002 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2179.704968] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d98863fc-b16c-4674-b05b-1d4147caad0b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.725833] env[62684]: DEBUG oslo_vmware.api [None req-775b639c-e2d0-4d46-b17d-122275e75251 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 2179.725833] env[62684]: value = "task-2053496" [ 2179.725833] env[62684]: _type = "Task" [ 2179.725833] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2179.736268] env[62684]: DEBUG oslo_vmware.api [None req-775b639c-e2d0-4d46-b17d-122275e75251 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053496, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2179.918761] env[62684]: DEBUG oslo_vmware.api [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053495, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076697} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2179.921190] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2179.922221] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1faa9ef0-4412-4d66-899b-f1d95c5795a6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.949837] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] f4fab142-8066-43c1-abaa-a9f66775114c/f4fab142-8066-43c1-abaa-a9f66775114c.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2179.957801] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c64ed5b7-fa96-4898-accf-902812b1b010 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.981834] env[62684]: DEBUG oslo_vmware.api [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2179.981834] env[62684]: value = "task-2053497" [ 2179.981834] env[62684]: _type = "Task" [ 2179.981834] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2179.992677] env[62684]: DEBUG oslo_vmware.api [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053497, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2179.993714] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2179.995481] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "interface-f037d6b2-2082-4611-985e-b9a077eb8250-e60b8515-c469-46d3-945b-bc843ccffc44" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2179.995715] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "interface-f037d6b2-2082-4611-985e-b9a077eb8250-e60b8515-c469-46d3-945b-bc843ccffc44" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2179.996089] env[62684]: DEBUG nova.objects.instance [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lazy-loading 'flavor' on Instance uuid f037d6b2-2082-4611-985e-b9a077eb8250 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2180.077010] env[62684]: DEBUG oslo_vmware.api [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053493, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.743735} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2180.077296] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 99a9653c-7221-4495-be5f-5441dc8da0f4/99a9653c-7221-4495-be5f-5441dc8da0f4.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2180.077561] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2180.077763] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ae3a8ad4-c1c1-467a-abdf-64e1bbd62f28 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.084444] env[62684]: DEBUG oslo_vmware.api [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2180.084444] env[62684]: value = "task-2053498" [ 2180.084444] env[62684]: _type = "Task" [ 2180.084444] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2180.092445] env[62684]: DEBUG oslo_vmware.api [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053498, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2180.097331] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.873s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2180.100103] env[62684]: DEBUG oslo_concurrency.lockutils [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.602s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2180.100103] env[62684]: DEBUG nova.objects.instance [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lazy-loading 'resources' on Instance uuid 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2180.116037] env[62684]: INFO nova.scheduler.client.report [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Deleted allocations for instance 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd [ 2180.239249] env[62684]: DEBUG oslo_vmware.api [None req-775b639c-e2d0-4d46-b17d-122275e75251 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053496, 'name': ReconfigVM_Task, 'duration_secs': 0.253607} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2180.241245] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-775b639c-e2d0-4d46-b17d-122275e75251 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Reconfigured VM instance instance-0000003f to detach disk 2002 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2180.241403] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-775b639c-e2d0-4d46-b17d-122275e75251 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2180.241668] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dd710d92-f60b-4c27-ac28-7a64799793ad {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.250448] env[62684]: DEBUG oslo_vmware.api [None req-775b639c-e2d0-4d46-b17d-122275e75251 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 2180.250448] env[62684]: value = "task-2053499" [ 2180.250448] env[62684]: _type = "Task" [ 2180.250448] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2180.259679] env[62684]: DEBUG oslo_vmware.api [None req-775b639c-e2d0-4d46-b17d-122275e75251 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053499, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2180.493993] env[62684]: DEBUG oslo_vmware.api [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053497, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2180.596325] env[62684]: DEBUG oslo_vmware.api [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053498, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071732} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2180.596618] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2180.599012] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-289c3d5b-f595-4b63-aba6-5d787246f293 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.627397] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] 99a9653c-7221-4495-be5f-5441dc8da0f4/99a9653c-7221-4495-be5f-5441dc8da0f4.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2180.631108] env[62684]: DEBUG nova.objects.instance [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lazy-loading 'pci_requests' on Instance uuid f037d6b2-2082-4611-985e-b9a077eb8250 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2180.637724] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-69003f24-f066-4f0f-88bd-8deb8ab4c41e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.653683] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e88dd332-e099-4486-894f-6c49e90ff8de tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Lock "7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.038s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2180.656931] env[62684]: DEBUG oslo_vmware.rw_handles [None 
req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Completed reading data from the image iterator. {{(pid=62684) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2180.657157] env[62684]: DEBUG oslo_vmware.rw_handles [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d8375c-8b7f-cfae-9c41-ab5b85467dac/disk-0.vmdk. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2180.657817] env[62684]: DEBUG nova.objects.base [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62684) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2180.658031] env[62684]: DEBUG nova.network.neutron [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2180.661128] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5a1c29d-eb75-425d-8995-24c7fd2082ca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.670422] env[62684]: DEBUG oslo_vmware.rw_handles [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d8375c-8b7f-cfae-9c41-ab5b85467dac/disk-0.vmdk is in state: ready. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2180.670501] env[62684]: DEBUG oslo_vmware.rw_handles [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d8375c-8b7f-cfae-9c41-ab5b85467dac/disk-0.vmdk. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2180.672164] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-b468aef9-242f-4615-bc69-430654aea037 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.673954] env[62684]: DEBUG oslo_vmware.api [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2180.673954] env[62684]: value = "task-2053500" [ 2180.673954] env[62684]: _type = "Task" [ 2180.673954] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2180.684293] env[62684]: DEBUG oslo_vmware.api [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053500, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2180.745397] env[62684]: DEBUG nova.policy [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e957449ae9d24bdaba38b3db704d3d61', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5cb4900a999e467bafdfd1fb407a82f4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2180.761797] env[62684]: DEBUG oslo_vmware.api [None req-775b639c-e2d0-4d46-b17d-122275e75251 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053499, 'name': PowerOnVM_Task, 'duration_secs': 0.461251} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2180.762086] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-775b639c-e2d0-4d46-b17d-122275e75251 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2180.762325] env[62684]: DEBUG nova.compute.manager [None req-775b639c-e2d0-4d46-b17d-122275e75251 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2180.765385] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6829e3-2f01-41bc-8a61-9f54d603ef8c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.888759] env[62684]: DEBUG oslo_vmware.rw_handles [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d8375c-8b7f-cfae-9c41-ab5b85467dac/disk-0.vmdk. 
{{(pid=62684) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2180.889061] env[62684]: INFO nova.virt.vmwareapi.images [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Downloaded image file data b5ede0c6-ad0d-4c75-b005-a332dfdc71df [ 2180.889912] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-349263e7-0630-48d9-9f24-f2c285a9a057 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.910892] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-95e0e07b-06d4-48a6-b3f8-c25cc43089d8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.912798] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b568f83f-813a-46d4-9e5e-129d22f6820d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.919612] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aac23ba7-af8e-47cd-873e-c96803bf06ef {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.951232] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0df374f-62d7-4b51-8263-fb570fda93a5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.954365] env[62684]: INFO nova.virt.vmwareapi.images [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] The imported VM was unregistered [ 2180.956635] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Caching image {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2180.956902] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Creating directory with path [datastore2] devstack-image-cache_base/b5ede0c6-ad0d-4c75-b005-a332dfdc71df {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2180.957186] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-db458378-b1f6-419a-8789-7eb0fc147214 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.963384] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dd4eb93-c026-4d5a-b796-a10d3c74fd0c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.968554] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 
tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Created directory with path [datastore2] devstack-image-cache_base/b5ede0c6-ad0d-4c75-b005-a332dfdc71df {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2180.968766] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_096271ae-590d-4396-ad3d-1c6f8160d4fa/OSTACK_IMG_096271ae-590d-4396-ad3d-1c6f8160d4fa.vmdk to [datastore2] devstack-image-cache_base/b5ede0c6-ad0d-4c75-b005-a332dfdc71df/b5ede0c6-ad0d-4c75-b005-a332dfdc71df.vmdk. {{(pid=62684) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2180.976288] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-da0ce638-d4b6-4768-8c9d-a2eba55ba1ff {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.978629] env[62684]: DEBUG nova.compute.provider_tree [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2180.984150] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2180.984150] env[62684]: value = "task-2053502" [ 2180.984150] env[62684]: _type = "Task" [ 2180.984150] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2180.995693] env[62684]: DEBUG oslo_vmware.api [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053497, 'name': ReconfigVM_Task, 'duration_secs': 0.842429} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2180.998872] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Reconfigured VM instance instance-00000060 to attach disk [datastore2] f4fab142-8066-43c1-abaa-a9f66775114c/f4fab142-8066-43c1-abaa-a9f66775114c.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2180.999547] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053502, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2181.000703] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b31fc17d-c248-4132-a5ee-4ef193be4ed6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.011541] env[62684]: DEBUG oslo_vmware.api [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2181.011541] env[62684]: value = "task-2053503" [ 2181.011541] env[62684]: _type = "Task" [ 2181.011541] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2181.020277] env[62684]: DEBUG oslo_vmware.api [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053503, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2181.183446] env[62684]: DEBUG oslo_vmware.api [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053500, 'name': ReconfigVM_Task, 'duration_secs': 0.37353} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2181.183795] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Reconfigured VM instance instance-0000005f to attach disk [datastore2] 99a9653c-7221-4495-be5f-5441dc8da0f4/99a9653c-7221-4495-be5f-5441dc8da0f4.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2181.184401] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c5a49dc0-b25e-4e57-885e-20a29a8b2b76 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.190965] env[62684]: DEBUG oslo_vmware.api [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2181.190965] env[62684]: value = "task-2053504" [ 2181.190965] env[62684]: _type = "Task" [ 2181.190965] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2181.198475] env[62684]: DEBUG oslo_vmware.api [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053504, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2181.496069] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053502, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2181.506614] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquiring lock "3a967adf-8c46-4787-b1d1-4ed701399576" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2181.506894] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Lock "3a967adf-8c46-4787-b1d1-4ed701399576" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2181.507130] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquiring lock "3a967adf-8c46-4787-b1d1-4ed701399576-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2181.507516] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Lock "3a967adf-8c46-4787-b1d1-4ed701399576-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2181.507516] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Lock "3a967adf-8c46-4787-b1d1-4ed701399576-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2181.510018] env[62684]: DEBUG nova.scheduler.client.report [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 145 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2181.510275] env[62684]: DEBUG nova.compute.provider_tree [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 145 to 146 during operation: update_inventory 
{{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2181.510461] env[62684]: DEBUG nova.compute.provider_tree [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2181.513828] env[62684]: INFO nova.compute.manager [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Terminating instance [ 2181.518620] env[62684]: DEBUG nova.compute.manager [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2181.518845] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2181.519597] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26e18145-0b1b-4504-b77f-2a2a9dc515a1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.527582] env[62684]: DEBUG oslo_vmware.api [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053503, 'name': Rename_Task, 'duration_secs': 0.162836} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2181.529537] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2181.530924] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2181.531430] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7f86cebf-fee6-4ca4-961a-53576322d875 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.532867] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-829e1046-daf6-45a6-8207-742d857b87b2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.538251] env[62684]: DEBUG oslo_vmware.api [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2181.538251] env[62684]: value = "task-2053505" [ 2181.538251] env[62684]: _type = "Task" [ 2181.538251] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2181.542371] env[62684]: DEBUG oslo_vmware.api [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2181.542371] env[62684]: value = "task-2053506" [ 2181.542371] env[62684]: _type = "Task" [ 2181.542371] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2181.551322] env[62684]: DEBUG oslo_vmware.api [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053505, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2181.554126] env[62684]: DEBUG oslo_vmware.api [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053506, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2181.701788] env[62684]: DEBUG oslo_vmware.api [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053504, 'name': Rename_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2181.996433] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053502, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2182.019560] env[62684]: DEBUG oslo_concurrency.lockutils [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.920s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2182.022032] env[62684]: DEBUG oslo_concurrency.lockutils [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.219s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2182.022439] env[62684]: DEBUG nova.objects.instance [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lazy-loading 'resources' on Instance uuid aebbc2cc-8973-4907-9ec8-085027fd7ca3 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2182.050782] env[62684]: INFO nova.scheduler.client.report [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Deleted allocations for instance 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5 [ 2182.052242] env[62684]: DEBUG oslo_vmware.api [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053505, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2182.064300] env[62684]: DEBUG oslo_vmware.api [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053506, 'name': PowerOffVM_Task, 'duration_secs': 0.512568} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2182.064634] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2182.064888] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2182.065186] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d4bfceb3-7c6a-4508-b0dc-bd9dc2566587 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.212190] env[62684]: DEBUG oslo_vmware.api [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053504, 'name': Rename_Task, 'duration_secs': 0.934295} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2182.212945] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2182.213748] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-863c15a9-cf99-4091-b538-88b2e040de83 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.225644] env[62684]: DEBUG oslo_vmware.api [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2182.225644] env[62684]: value = "task-2053508" [ 2182.225644] env[62684]: _type = "Task" [ 2182.225644] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2182.235583] env[62684]: DEBUG nova.compute.manager [req-a5b8ea4d-1741-4bcb-8711-7e39c7fa7247 req-68529757-38f2-4a5d-b6ef-3aaeb163e1f5 service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Received event network-vif-plugged-e60b8515-c469-46d3-945b-bc843ccffc44 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2182.236058] env[62684]: DEBUG oslo_concurrency.lockutils [req-a5b8ea4d-1741-4bcb-8711-7e39c7fa7247 req-68529757-38f2-4a5d-b6ef-3aaeb163e1f5 service nova] Acquiring lock "f037d6b2-2082-4611-985e-b9a077eb8250-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2182.236500] env[62684]: DEBUG oslo_concurrency.lockutils [req-a5b8ea4d-1741-4bcb-8711-7e39c7fa7247 req-68529757-38f2-4a5d-b6ef-3aaeb163e1f5 service nova] Lock "f037d6b2-2082-4611-985e-b9a077eb8250-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2182.236847] env[62684]: DEBUG oslo_concurrency.lockutils [req-a5b8ea4d-1741-4bcb-8711-7e39c7fa7247 req-68529757-38f2-4a5d-b6ef-3aaeb163e1f5 service nova] Lock "f037d6b2-2082-4611-985e-b9a077eb8250-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2182.237219] env[62684]: DEBUG nova.compute.manager [req-a5b8ea4d-1741-4bcb-8711-7e39c7fa7247 req-68529757-38f2-4a5d-b6ef-3aaeb163e1f5 service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] No waiting events found dispatching network-vif-plugged-e60b8515-c469-46d3-945b-bc843ccffc44 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2182.238354] env[62684]: WARNING nova.compute.manager [req-a5b8ea4d-1741-4bcb-8711-7e39c7fa7247 req-68529757-38f2-4a5d-b6ef-3aaeb163e1f5 service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Received unexpected event network-vif-plugged-e60b8515-c469-46d3-945b-bc843ccffc44 for instance with vm_state active and task_state None. [ 2182.248624] env[62684]: DEBUG oslo_vmware.api [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053508, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2182.335447] env[62684]: DEBUG nova.network.neutron [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Successfully updated port: e60b8515-c469-46d3-945b-bc843ccffc44 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2182.360596] env[62684]: DEBUG nova.compute.manager [req-34279c88-d1ff-4e8a-9e3d-94f85d550492 req-5f4b5c4c-f9a0-4c12-9a3b-d44350255693 service nova] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Received event network-changed-3fcb3920-5e10-45e2-865d-cc9b89a1e335 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2182.360755] env[62684]: DEBUG nova.compute.manager [req-34279c88-d1ff-4e8a-9e3d-94f85d550492 req-5f4b5c4c-f9a0-4c12-9a3b-d44350255693 service nova] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Refreshing instance network info cache due to event network-changed-3fcb3920-5e10-45e2-865d-cc9b89a1e335. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2182.361244] env[62684]: DEBUG oslo_concurrency.lockutils [req-34279c88-d1ff-4e8a-9e3d-94f85d550492 req-5f4b5c4c-f9a0-4c12-9a3b-d44350255693 service nova] Acquiring lock "refresh_cache-2baabe7a-ed33-4cef-9acc-a7b804610b0a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2182.361476] env[62684]: DEBUG oslo_concurrency.lockutils [req-34279c88-d1ff-4e8a-9e3d-94f85d550492 req-5f4b5c4c-f9a0-4c12-9a3b-d44350255693 service nova] Acquired lock "refresh_cache-2baabe7a-ed33-4cef-9acc-a7b804610b0a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2182.361688] env[62684]: DEBUG nova.network.neutron [req-34279c88-d1ff-4e8a-9e3d-94f85d550492 req-5f4b5c4c-f9a0-4c12-9a3b-d44350255693 service nova] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Refreshing network info cache for port 3fcb3920-5e10-45e2-865d-cc9b89a1e335 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2182.390409] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2182.390735] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2182.391043] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Deleting the datastore file [datastore2] 3a967adf-8c46-4787-b1d1-4ed701399576 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2182.391312] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7aa2b4b0-20a1-4046-b33f-0127da03727b {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.402095] env[62684]: DEBUG oslo_vmware.api [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for the task: (returnval){ [ 2182.402095] env[62684]: value = "task-2053509" [ 2182.402095] env[62684]: _type = "Task" [ 2182.402095] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2182.414227] env[62684]: DEBUG oslo_vmware.api [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053509, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2182.499354] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053502, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2182.556739] env[62684]: DEBUG oslo_vmware.api [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053505, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2182.562376] env[62684]: DEBUG oslo_concurrency.lockutils [None req-495243ee-91a6-4e98-baaa-85e87b56e7f6 tempest-ImagesOneServerNegativeTestJSON-949560501 tempest-ImagesOneServerNegativeTestJSON-949560501-project-member] Lock "7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.188s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2182.764091] env[62684]: DEBUG oslo_vmware.api [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053508, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2182.803342] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4572552f-8065-4686-8f7b-2d33be2d501a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.814838] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-336c85cc-7381-48b1-a3b8-8e2c599b3ac7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.867294] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "refresh_cache-f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2182.867610] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "refresh_cache-f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2182.867894] env[62684]: DEBUG nova.network.neutron [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2182.875358] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2432bcdf-7801-4e1a-b8f4-7146a1e67103 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.891504] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c44e35-5b88-42d7-bcee-75f37b5e3917 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.914955] env[62684]: DEBUG nova.compute.provider_tree [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2182.930758] env[62684]: DEBUG oslo_vmware.api [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053509, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.000930] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053502, 'name': MoveVirtualDisk_Task} progress is 43%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.053946] env[62684]: DEBUG oslo_vmware.api [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053505, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.139735] env[62684]: DEBUG nova.network.neutron [req-34279c88-d1ff-4e8a-9e3d-94f85d550492 req-5f4b5c4c-f9a0-4c12-9a3b-d44350255693 service nova] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Updated VIF entry in instance network info cache for port 3fcb3920-5e10-45e2-865d-cc9b89a1e335. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2183.140462] env[62684]: DEBUG nova.network.neutron [req-34279c88-d1ff-4e8a-9e3d-94f85d550492 req-5f4b5c4c-f9a0-4c12-9a3b-d44350255693 service nova] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Updating instance_info_cache with network_info: [{"id": "3fcb3920-5e10-45e2-865d-cc9b89a1e335", "address": "fa:16:3e:2f:71:d6", "network": {"id": "bd253713-4e81-4c94-9689-22b81e7f51b6", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-307001665-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd812751722143fabedfa986a2d98b59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3fcb3920-5e", "ovs_interfaceid": "3fcb3920-5e10-45e2-865d-cc9b89a1e335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2183.239762] env[62684]: DEBUG oslo_vmware.api [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053508, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.425359] env[62684]: DEBUG nova.scheduler.client.report [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2183.428964] env[62684]: DEBUG oslo_vmware.api [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053509, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.430047] env[62684]: WARNING nova.network.neutron [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] bbb78a3c-6804-4aae-9107-4ae6699c305d already exists in list: networks containing: ['bbb78a3c-6804-4aae-9107-4ae6699c305d']. ignoring it [ 2183.501877] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053502, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.552907] env[62684]: DEBUG oslo_vmware.api [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053505, 'name': PowerOnVM_Task, 'duration_secs': 1.736059} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2183.553408] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2183.553654] env[62684]: INFO nova.compute.manager [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Took 13.28 seconds to spawn the instance on the hypervisor. 
[ 2183.553846] env[62684]: DEBUG nova.compute.manager [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2183.554671] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7a2b6e-a511-4a49-9bc5-e4e01ed526e4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.644016] env[62684]: DEBUG oslo_concurrency.lockutils [req-34279c88-d1ff-4e8a-9e3d-94f85d550492 req-5f4b5c4c-f9a0-4c12-9a3b-d44350255693 service nova] Releasing lock "refresh_cache-2baabe7a-ed33-4cef-9acc-a7b804610b0a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2183.739928] env[62684]: DEBUG oslo_vmware.api [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053508, 'name': PowerOnVM_Task} progress is 76%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.925067] env[62684]: DEBUG oslo_vmware.api [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053509, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.933162] env[62684]: DEBUG oslo_concurrency.lockutils [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.911s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2183.935647] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.980s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2183.935901] env[62684]: DEBUG nova.objects.instance [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Lazy-loading 'resources' on Instance uuid 6b461482-0606-4af3-98a2-88c0318d1a69 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2183.958341] env[62684]: INFO nova.scheduler.client.report [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Deleted allocations for instance aebbc2cc-8973-4907-9ec8-085027fd7ca3 [ 2183.965858] env[62684]: DEBUG nova.network.neutron [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Updating instance_info_cache with network_info: [{"id": "b5747949-00d7-4815-9080-52285a6a8813", "address": "fa:16:3e:fd:34:0c", "network": {"id": 
"bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5747949-00", "ovs_interfaceid": "b5747949-00d7-4815-9080-52285a6a8813", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e60b8515-c469-46d3-945b-bc843ccffc44", "address": "fa:16:3e:f9:ed:d2", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape60b8515-c4", "ovs_interfaceid": "e60b8515-c469-46d3-945b-bc843ccffc44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2184.001994] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053502, 'name': MoveVirtualDisk_Task} progress is 85%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.076663] env[62684]: INFO nova.compute.manager [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Took 22.22 seconds to build instance. [ 2184.242057] env[62684]: DEBUG oslo_vmware.api [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053508, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.425958] env[62684]: DEBUG oslo_vmware.api [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Task: {'id': task-2053509, 'name': DeleteDatastoreFile_Task, 'duration_secs': 2.005473} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2184.425958] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2184.425958] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2184.425958] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2184.426319] env[62684]: INFO nova.compute.manager [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Took 2.91 seconds to destroy the instance on the hypervisor. [ 2184.426719] env[62684]: DEBUG oslo.service.loopingcall [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2184.426827] env[62684]: DEBUG nova.compute.manager [-] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2184.426877] env[62684]: DEBUG nova.network.neutron [-] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2184.469027] env[62684]: DEBUG oslo_concurrency.lockutils [None req-717df1b7-f307-458f-9de5-bb74fb3c3d52 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "aebbc2cc-8973-4907-9ec8-085027fd7ca3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.863s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2184.469027] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "refresh_cache-f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2184.469698] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2184.470036] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2184.471240] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f090a5ca-9846-4cf4-a911-b485decbbb9f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.477790] env[62684]: DEBUG nova.compute.manager [req-e4391b04-ffa5-4d65-8171-7664e2a06382 req-fc1e17fe-b758-4c00-8931-0381bb1d1e97 service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Received event network-changed-e60b8515-c469-46d3-945b-bc843ccffc44 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2184.478377] env[62684]: DEBUG nova.compute.manager [req-e4391b04-ffa5-4d65-8171-7664e2a06382 req-fc1e17fe-b758-4c00-8931-0381bb1d1e97 service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Refreshing instance network info cache due to event network-changed-e60b8515-c469-46d3-945b-bc843ccffc44. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2184.479076] env[62684]: DEBUG oslo_concurrency.lockutils [req-e4391b04-ffa5-4d65-8171-7664e2a06382 req-fc1e17fe-b758-4c00-8931-0381bb1d1e97 service nova] Acquiring lock "refresh_cache-f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2184.479649] env[62684]: DEBUG oslo_concurrency.lockutils [req-e4391b04-ffa5-4d65-8171-7664e2a06382 req-fc1e17fe-b758-4c00-8931-0381bb1d1e97 service nova] Acquired lock "refresh_cache-f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2184.480107] env[62684]: DEBUG nova.network.neutron [req-e4391b04-ffa5-4d65-8171-7664e2a06382 req-fc1e17fe-b758-4c00-8931-0381bb1d1e97 service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Refreshing network info cache for port e60b8515-c469-46d3-945b-bc843ccffc44 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2184.501884] env[62684]: DEBUG nova.virt.hardware [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2184.502531] env[62684]: DEBUG nova.virt.hardware [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2184.502531] env[62684]: DEBUG nova.virt.hardware [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2184.502746] env[62684]: DEBUG nova.virt.hardware [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2184.503013] env[62684]: DEBUG nova.virt.hardware [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2184.503272] env[62684]: DEBUG nova.virt.hardware [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 2184.503573] env[62684]: DEBUG nova.virt.hardware [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2184.503789] env[62684]: DEBUG nova.virt.hardware [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2184.504244] env[62684]: DEBUG nova.virt.hardware [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2184.504502] env[62684]: DEBUG nova.virt.hardware [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2184.504771] env[62684]: DEBUG nova.virt.hardware [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2184.511614] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Reconfiguring VM to attach interface {{(pid=62684) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 2184.519222] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99101b25-c667-40e7-b514-f4bfbaa76ec5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.540392] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053502, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.446218} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2184.543118] env[62684]: INFO nova.virt.vmwareapi.ds_util [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_096271ae-590d-4396-ad3d-1c6f8160d4fa/OSTACK_IMG_096271ae-590d-4396-ad3d-1c6f8160d4fa.vmdk to [datastore2] devstack-image-cache_base/b5ede0c6-ad0d-4c75-b005-a332dfdc71df/b5ede0c6-ad0d-4c75-b005-a332dfdc71df.vmdk. 
[ 2184.543423] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Cleaning up location [datastore2] OSTACK_IMG_096271ae-590d-4396-ad3d-1c6f8160d4fa {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2184.543636] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_096271ae-590d-4396-ad3d-1c6f8160d4fa {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2184.544637] env[62684]: DEBUG oslo_vmware.api [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2184.544637] env[62684]: value = "task-2053510" [ 2184.544637] env[62684]: _type = "Task" [ 2184.544637] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2184.544849] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6766b829-87a6-4bae-94ce-5ecacf1f903b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.561255] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2184.561255] env[62684]: value = "task-2053511" [ 2184.561255] env[62684]: _type = "Task" [ 2184.561255] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2184.564836] env[62684]: DEBUG oslo_vmware.api [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053510, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.579025] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053511, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.579025] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f21819de-1ebd-4489-8445-2aa35ae318d6 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "f4fab142-8066-43c1-abaa-a9f66775114c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.737s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2184.627098] env[62684]: DEBUG nova.compute.manager [req-c97123dd-0e27-4960-b5d9-e2959e39b031 req-e8e1ca49-4a79-43d0-a8d5-2bdbf7b52fb3 service nova] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Received event network-changed-3fcb3920-5e10-45e2-865d-cc9b89a1e335 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2184.627438] env[62684]: DEBUG nova.compute.manager [req-c97123dd-0e27-4960-b5d9-e2959e39b031 req-e8e1ca49-4a79-43d0-a8d5-2bdbf7b52fb3 service nova] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Refreshing instance network info cache due to event network-changed-3fcb3920-5e10-45e2-865d-cc9b89a1e335. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2184.627540] env[62684]: DEBUG oslo_concurrency.lockutils [req-c97123dd-0e27-4960-b5d9-e2959e39b031 req-e8e1ca49-4a79-43d0-a8d5-2bdbf7b52fb3 service nova] Acquiring lock "refresh_cache-2baabe7a-ed33-4cef-9acc-a7b804610b0a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2184.627837] env[62684]: DEBUG oslo_concurrency.lockutils [req-c97123dd-0e27-4960-b5d9-e2959e39b031 req-e8e1ca49-4a79-43d0-a8d5-2bdbf7b52fb3 service nova] Acquired lock "refresh_cache-2baabe7a-ed33-4cef-9acc-a7b804610b0a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2184.628008] env[62684]: DEBUG nova.network.neutron [req-c97123dd-0e27-4960-b5d9-e2959e39b031 req-e8e1ca49-4a79-43d0-a8d5-2bdbf7b52fb3 service nova] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Refreshing network info cache for port 3fcb3920-5e10-45e2-865d-cc9b89a1e335 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2184.739383] env[62684]: DEBUG oslo_vmware.api [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053508, 'name': PowerOnVM_Task, 'duration_secs': 2.301323} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2184.739760] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2184.740170] env[62684]: INFO nova.compute.manager [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Took 20.68 seconds to spawn the instance on the hypervisor. 
[ 2184.740388] env[62684]: DEBUG nova.compute.manager [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2184.741504] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79f69f56-b4a5-439f-9240-40a60b85b217 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.749557] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1d514fe-8dbb-4063-9774-0019575f2e76 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.760896] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9bfc6d6-ac7c-4c70-bf5a-1507c4dfd671 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.795120] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31eea730-9deb-4c03-b980-b1a3825e2389 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.804085] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c44011-434a-4265-8a04-c2db5a95cb64 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.819570] env[62684]: DEBUG nova.compute.provider_tree [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2185.057640] env[62684]: DEBUG oslo_vmware.api [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053510, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2185.079427] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053511, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137996} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2185.079786] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2185.080016] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b5ede0c6-ad0d-4c75-b005-a332dfdc71df/b5ede0c6-ad0d-4c75-b005-a332dfdc71df.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2185.080352] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b5ede0c6-ad0d-4c75-b005-a332dfdc71df/b5ede0c6-ad0d-4c75-b005-a332dfdc71df.vmdk to [datastore2] daf1486b-d5c2-4341-8a27-36eeeb08cd26/daf1486b-d5c2-4341-8a27-36eeeb08cd26.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2185.080675] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f2c8f489-349d-44b7-99b3-9baa8920a6bf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.088390] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2185.088390] env[62684]: value = "task-2053512" [ 2185.088390] env[62684]: _type = "Task" [ 2185.088390] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2185.098540] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053512, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2185.267162] env[62684]: INFO nova.compute.manager [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Took 26.39 seconds to build instance. 
[ 2185.323966] env[62684]: DEBUG nova.scheduler.client.report [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2185.373885] env[62684]: DEBUG nova.network.neutron [-] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2185.480265] env[62684]: DEBUG nova.network.neutron [req-c97123dd-0e27-4960-b5d9-e2959e39b031 req-e8e1ca49-4a79-43d0-a8d5-2bdbf7b52fb3 service nova] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Updated VIF entry in instance network info cache for port 3fcb3920-5e10-45e2-865d-cc9b89a1e335. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2185.480743] env[62684]: DEBUG nova.network.neutron [req-c97123dd-0e27-4960-b5d9-e2959e39b031 req-e8e1ca49-4a79-43d0-a8d5-2bdbf7b52fb3 service nova] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Updating instance_info_cache with network_info: [{"id": "3fcb3920-5e10-45e2-865d-cc9b89a1e335", "address": "fa:16:3e:2f:71:d6", "network": {"id": "bd253713-4e81-4c94-9689-22b81e7f51b6", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-307001665-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd812751722143fabedfa986a2d98b59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3fcb3920-5e", "ovs_interfaceid": "3fcb3920-5e10-45e2-865d-cc9b89a1e335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2185.541141] env[62684]: DEBUG nova.network.neutron [req-e4391b04-ffa5-4d65-8171-7664e2a06382 req-fc1e17fe-b758-4c00-8931-0381bb1d1e97 service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Updated VIF entry in instance network info cache for port e60b8515-c469-46d3-945b-bc843ccffc44. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2185.541141] env[62684]: DEBUG nova.network.neutron [req-e4391b04-ffa5-4d65-8171-7664e2a06382 req-fc1e17fe-b758-4c00-8931-0381bb1d1e97 service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Updating instance_info_cache with network_info: [{"id": "b5747949-00d7-4815-9080-52285a6a8813", "address": "fa:16:3e:fd:34:0c", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5747949-00", "ovs_interfaceid": "b5747949-00d7-4815-9080-52285a6a8813", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e60b8515-c469-46d3-945b-bc843ccffc44", "address": "fa:16:3e:f9:ed:d2", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape60b8515-c4", "ovs_interfaceid": "e60b8515-c469-46d3-945b-bc843ccffc44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2185.559450] env[62684]: DEBUG oslo_vmware.api [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053510, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2185.575070] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16a481bb-9a06-43d9-b66b-c1c90c43a7ec tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "f4fab142-8066-43c1-abaa-a9f66775114c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2185.575389] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16a481bb-9a06-43d9-b66b-c1c90c43a7ec tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "f4fab142-8066-43c1-abaa-a9f66775114c" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2185.575598] env[62684]: DEBUG nova.compute.manager [None req-16a481bb-9a06-43d9-b66b-c1c90c43a7ec tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2185.577321] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-756b14c8-9b5f-4769-992f-897229e86f16 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.586087] env[62684]: DEBUG nova.compute.manager [None req-16a481bb-9a06-43d9-b66b-c1c90c43a7ec tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62684) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 2185.586087] env[62684]: DEBUG nova.objects.instance [None req-16a481bb-9a06-43d9-b66b-c1c90c43a7ec tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lazy-loading 'flavor' on Instance uuid f4fab142-8066-43c1-abaa-a9f66775114c {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2185.599432] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053512, 'name': CopyVirtualDisk_Task} progress is 12%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2185.768944] env[62684]: DEBUG oslo_concurrency.lockutils [None req-061b87be-6ee5-4961-80a1-9cdcc0435a64 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "99a9653c-7221-4495-be5f-5441dc8da0f4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.903s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2185.829809] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.894s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2185.832421] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.671s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2185.834240] env[62684]: INFO nova.compute.claims [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2185.857086] env[62684]: INFO nova.scheduler.client.report [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Deleted allocations for instance 6b461482-0606-4af3-98a2-88c0318d1a69 [ 2185.875850] env[62684]: INFO nova.compute.manager [-] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Took 1.45 seconds to deallocate network for instance. [ 2185.983704] env[62684]: DEBUG oslo_concurrency.lockutils [req-c97123dd-0e27-4960-b5d9-e2959e39b031 req-e8e1ca49-4a79-43d0-a8d5-2bdbf7b52fb3 service nova] Releasing lock "refresh_cache-2baabe7a-ed33-4cef-9acc-a7b804610b0a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2186.044261] env[62684]: DEBUG oslo_concurrency.lockutils [req-e4391b04-ffa5-4d65-8171-7664e2a06382 req-fc1e17fe-b758-4c00-8931-0381bb1d1e97 service nova] Releasing lock "refresh_cache-f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2186.062779] env[62684]: DEBUG oslo_vmware.api [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053510, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2186.095414] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-16a481bb-9a06-43d9-b66b-c1c90c43a7ec tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2186.095752] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-98ba6edd-88de-44f8-81e9-9fe03e3a890b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.105836] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053512, 'name': CopyVirtualDisk_Task} progress is 32%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2186.107632] env[62684]: DEBUG oslo_vmware.api [None req-16a481bb-9a06-43d9-b66b-c1c90c43a7ec tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2186.107632] env[62684]: value = "task-2053513" [ 2186.107632] env[62684]: _type = "Task" [ 2186.107632] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2186.118066] env[62684]: DEBUG oslo_vmware.api [None req-16a481bb-9a06-43d9-b66b-c1c90c43a7ec tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053513, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2186.366674] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6e8cbf2e-0f5d-4a8f-8276-9ab609c48130 tempest-ServersTestManualDisk-83935929 tempest-ServersTestManualDisk-83935929-project-member] Lock "6b461482-0606-4af3-98a2-88c0318d1a69" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.247s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2186.386009] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2186.415420] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2186.416113] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2186.562116] env[62684]: DEBUG oslo_vmware.api [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053510, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2186.604578] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053512, 'name': CopyVirtualDisk_Task} progress is 54%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2186.619990] env[62684]: DEBUG oslo_vmware.api [None req-16a481bb-9a06-43d9-b66b-c1c90c43a7ec tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053513, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2186.732637] env[62684]: DEBUG oslo_concurrency.lockutils [None req-764f5348-135e-4a0f-a1d7-364f4a8bbc20 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "99a9653c-7221-4495-be5f-5441dc8da0f4" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2186.733019] env[62684]: DEBUG oslo_concurrency.lockutils [None req-764f5348-135e-4a0f-a1d7-364f4a8bbc20 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "99a9653c-7221-4495-be5f-5441dc8da0f4" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2186.733249] env[62684]: DEBUG nova.compute.manager [None req-764f5348-135e-4a0f-a1d7-364f4a8bbc20 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2186.734225] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84efbc0-b9c4-420b-a52d-f1f66e420095 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.743902] env[62684]: DEBUG nova.compute.manager [None req-764f5348-135e-4a0f-a1d7-364f4a8bbc20 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62684) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 2186.744595] env[62684]: DEBUG nova.objects.instance [None req-764f5348-135e-4a0f-a1d7-364f4a8bbc20 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lazy-loading 'flavor' on Instance uuid 99a9653c-7221-4495-be5f-5441dc8da0f4 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2186.928400] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2186.935208] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2186.935208] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the list of instances to heal {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2187.006859] env[62684]: DEBUG nova.compute.manager [req-bfc16e9a-f462-41c6-8ed2-8ee44176e918 req-7c1b0f85-fb38-47ff-9b78-b41ae43175b8 service nova] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Received event network-vif-deleted-c12d6728-00ee-47e7-9fa8-92384e9f7a3c {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 
2187.064066] env[62684]: DEBUG oslo_vmware.api [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053510, 'name': ReconfigVM_Task, 'duration_secs': 2.268874} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2187.064759] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2187.064980] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Reconfigured VM to attach interface {{(pid=62684) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 2187.109219] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053512, 'name': CopyVirtualDisk_Task} progress is 74%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2187.125394] env[62684]: DEBUG oslo_vmware.api [None req-16a481bb-9a06-43d9-b66b-c1c90c43a7ec tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053513, 'name': PowerOffVM_Task, 'duration_secs': 0.594931} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2187.128525] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-16a481bb-9a06-43d9-b66b-c1c90c43a7ec tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2187.128799] env[62684]: DEBUG nova.compute.manager [None req-16a481bb-9a06-43d9-b66b-c1c90c43a7ec tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2187.130062] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-312d40db-10d3-49ec-8094-947154545704 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.147743] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-247c9841-9e8d-465e-b568-33c8580d3b59 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.159927] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45e3bf6f-cfdc-4cdf-bf9e-6c8e18dbf610 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.204284] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3cf295d-e424-4a59-b56b-2d6eafd4b4da {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.217299] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26171587-6d4e-4ae6-8c1a-3b141db15378 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.235753] env[62684]: DEBUG nova.compute.provider_tree [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2187.250694] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-764f5348-135e-4a0f-a1d7-364f4a8bbc20 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2187.250694] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c82e5e72-19a3-4ba2-a01c-628e53903a49 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.260381] env[62684]: DEBUG oslo_vmware.api [None req-764f5348-135e-4a0f-a1d7-364f4a8bbc20 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2187.260381] env[62684]: value = "task-2053514" [ 2187.260381] env[62684]: _type = "Task" [ 2187.260381] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2187.274257] env[62684]: DEBUG oslo_vmware.api [None req-764f5348-135e-4a0f-a1d7-364f4a8bbc20 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053514, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2187.445296] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Skipping network cache update for instance because it is Building. {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 2187.524588] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "refresh_cache-ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2187.524686] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "refresh_cache-ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2187.524806] env[62684]: DEBUG nova.network.neutron [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Forcefully refreshing network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2187.525383] env[62684]: DEBUG nova.objects.instance [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lazy-loading 'info_cache' on Instance uuid ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2187.571150] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2cf58caa-82a8-447a-a1a3-7c18fd096f3f tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "interface-f037d6b2-2082-4611-985e-b9a077eb8250-e60b8515-c469-46d3-945b-bc843ccffc44" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.575s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2187.609327] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053512, 'name': CopyVirtualDisk_Task} progress is 94%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2187.655700] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16a481bb-9a06-43d9-b66b-c1c90c43a7ec tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "f4fab142-8066-43c1-abaa-a9f66775114c" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.080s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2187.739198] env[62684]: DEBUG nova.scheduler.client.report [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2187.772718] env[62684]: DEBUG oslo_vmware.api [None req-764f5348-135e-4a0f-a1d7-364f4a8bbc20 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053514, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2188.102723] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053512, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.678717} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2188.104102] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b5ede0c6-ad0d-4c75-b005-a332dfdc71df/b5ede0c6-ad0d-4c75-b005-a332dfdc71df.vmdk to [datastore2] daf1486b-d5c2-4341-8a27-36eeeb08cd26/daf1486b-d5c2-4341-8a27-36eeeb08cd26.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2188.107698] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c14ecb4-bc50-44d0-b294-e3575b25bbe6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.111016] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "02480039-f749-402a-92db-df664304a5bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2188.111277] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "02480039-f749-402a-92db-df664304a5bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2188.133501] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] daf1486b-d5c2-4341-8a27-36eeeb08cd26/daf1486b-d5c2-4341-8a27-36eeeb08cd26.vmdk or device None with type streamOptimized {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2188.135171] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-082695ef-2067-4dd8-8eff-cda35fbde928 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.157430] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2188.157430] env[62684]: value = "task-2053515" [ 2188.157430] env[62684]: _type = "Task" [ 2188.157430] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2188.166778] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053515, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2188.244936] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.412s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2188.245759] env[62684]: DEBUG nova.compute.manager [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2188.249295] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.786s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2188.249415] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2188.252122] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.258s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2188.253722] env[62684]: INFO nova.compute.claims [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2188.273063] env[62684]: DEBUG oslo_vmware.api [None req-764f5348-135e-4a0f-a1d7-364f4a8bbc20 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053514, 'name': PowerOffVM_Task, 'duration_secs': 0.800991} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2188.274263] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-764f5348-135e-4a0f-a1d7-364f4a8bbc20 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2188.277581] env[62684]: DEBUG nova.compute.manager [None req-764f5348-135e-4a0f-a1d7-364f4a8bbc20 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2188.278593] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-936e0e2b-76f2-4c0b-a547-0f7a21794218 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.301858] env[62684]: INFO nova.scheduler.client.report [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Deleted allocations for instance 0a8d7c48-cf90-4baf-a900-38fbd62869a6 [ 2188.614112] env[62684]: DEBUG nova.compute.manager [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2188.669502] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053515, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2188.758969] env[62684]: DEBUG nova.compute.utils [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2188.765018] env[62684]: DEBUG nova.compute.manager [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2188.765018] env[62684]: DEBUG nova.network.neutron [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2188.793369] env[62684]: DEBUG oslo_concurrency.lockutils [None req-764f5348-135e-4a0f-a1d7-364f4a8bbc20 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "99a9653c-7221-4495-be5f-5441dc8da0f4" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.060s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2188.810908] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0601ab0-81d2-4626-ac6d-5db833ef6fbe tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "0a8d7c48-cf90-4baf-a900-38fbd62869a6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.439s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2188.878797] env[62684]: DEBUG nova.policy [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a3800d71923848db8635de9a8a2ff9f6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '76d88ac878d44480b3b54b24ab87efa9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2189.146416] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2189.177242] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053515, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2189.269016] env[62684]: DEBUG nova.compute.manager [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2189.303596] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "interface-f037d6b2-2082-4611-985e-b9a077eb8250-e60b8515-c469-46d3-945b-bc843ccffc44" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2189.303862] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "interface-f037d6b2-2082-4611-985e-b9a077eb8250-e60b8515-c469-46d3-945b-bc843ccffc44" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2189.330582] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6a301a5d-eb03-4600-bdb0-0ca3153101f9 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "f4fab142-8066-43c1-abaa-a9f66775114c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2189.330582] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6a301a5d-eb03-4600-bdb0-0ca3153101f9 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "f4fab142-8066-43c1-abaa-a9f66775114c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2189.330792] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6a301a5d-eb03-4600-bdb0-0ca3153101f9 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "f4fab142-8066-43c1-abaa-a9f66775114c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2189.330981] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6a301a5d-eb03-4600-bdb0-0ca3153101f9 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "f4fab142-8066-43c1-abaa-a9f66775114c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2189.331309] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6a301a5d-eb03-4600-bdb0-0ca3153101f9 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "f4fab142-8066-43c1-abaa-a9f66775114c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2189.334219] env[62684]: INFO nova.compute.manager [None req-6a301a5d-eb03-4600-bdb0-0ca3153101f9 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] 
Terminating instance [ 2189.337094] env[62684]: DEBUG nova.compute.manager [None req-6a301a5d-eb03-4600-bdb0-0ca3153101f9 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2189.338177] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6a301a5d-eb03-4600-bdb0-0ca3153101f9 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2189.338406] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ce34940-b574-4208-941d-7630b1b1576a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.350946] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6a301a5d-eb03-4600-bdb0-0ca3153101f9 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2189.351957] env[62684]: DEBUG nova.network.neutron [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Updating instance_info_cache with network_info: [{"id": "5d09e22d-005f-49a7-8c55-7d69dfd47687", "address": "fa:16:3e:fe:35:e7", "network": {"id": "aa52badb-0b73-48bc-afaa-5e06a97d5c7d", "bridge": "br-int", "label": "tempest-ServersTestJSON-556342067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c54f74085f343d2b790145b0d82a9f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d09e22d-00", "ovs_interfaceid": "5d09e22d-005f-49a7-8c55-7d69dfd47687", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2189.356934] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea5475a2-c052-4fba-9ce1-adf7dacf1d4a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.370335] env[62684]: DEBUG nova.network.neutron [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Successfully created port: 34ae3a7e-83a9-4ebd-8582-bb73f3050948 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2189.555685] env[62684]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6b9d8f1-7728-4562-b953-247ed363b663 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.560807] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f96d34af-755e-4822-8f27-2ec5375ca63a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.592796] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea4c9621-b58e-4255-83f5-cef56bd2dbf6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.600270] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8996ac73-24bd-40d2-ac7b-9eae962e7d65 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.614133] env[62684]: DEBUG nova.compute.provider_tree [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2189.667823] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053515, 'name': ReconfigVM_Task, 'duration_secs': 1.049376} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2189.668215] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Reconfigured VM instance instance-00000050 to attach disk [datastore2] daf1486b-d5c2-4341-8a27-36eeeb08cd26/daf1486b-d5c2-4341-8a27-36eeeb08cd26.vmdk or device None with type streamOptimized {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2189.668931] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e70d8b66-b837-4e79-ab0f-d40679d2470d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.676406] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2189.676406] env[62684]: value = "task-2053517" [ 2189.676406] env[62684]: _type = "Task" [ 2189.676406] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2189.686648] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053517, 'name': Rename_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2189.809710] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2189.810030] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2189.810898] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ec7c125-0b13-40b6-bbc7-a8ca3176d917 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.829525] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa56a7b3-daec-4a20-a995-451d753d5c79 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.858644] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Reconfiguring VM to detach interface {{(pid=62684) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 2189.859241] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "refresh_cache-ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2189.859432] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Updated the network info_cache for instance {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2189.859676] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a7b35a2-37e3-4910-ad02-d3eb89ab4ff7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.872482] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2189.873121] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2189.873657] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2189.873894] env[62684]: DEBUG 
oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2189.874061] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2189.874241] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2189.874456] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2189.874516] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2189.879324] env[62684]: DEBUG oslo_vmware.api [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2189.879324] env[62684]: value = "task-2053518" [ 2189.879324] env[62684]: _type = "Task" [ 2189.879324] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2189.887737] env[62684]: DEBUG oslo_vmware.api [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053518, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2190.015124] env[62684]: INFO nova.compute.manager [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Rebuilding instance [ 2190.064752] env[62684]: DEBUG nova.compute.manager [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2190.064752] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2658f00e-ce0d-446a-b0e7-2f7cd8569af0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.120203] env[62684]: DEBUG nova.scheduler.client.report [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2190.188372] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053517, 'name': Rename_Task, 'duration_secs': 0.208854} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2190.188810] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2190.189145] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-97d60a79-f391-414d-8af1-fbb84bfce163 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.196788] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2190.196788] env[62684]: value = "task-2053519" [ 2190.196788] env[62684]: _type = "Task" [ 2190.196788] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2190.206472] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053519, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2190.283437] env[62684]: DEBUG nova.compute.manager [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2190.315461] env[62684]: DEBUG nova.virt.hardware [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2190.315746] env[62684]: DEBUG nova.virt.hardware [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2190.315912] env[62684]: DEBUG nova.virt.hardware [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2190.316117] env[62684]: DEBUG nova.virt.hardware [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2190.316273] env[62684]: DEBUG nova.virt.hardware [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2190.316489] env[62684]: DEBUG nova.virt.hardware [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2190.316772] env[62684]: DEBUG nova.virt.hardware [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2190.316943] env[62684]: DEBUG nova.virt.hardware [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2190.317146] env[62684]: DEBUG nova.virt.hardware [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2190.317320] env[62684]: DEBUG nova.virt.hardware [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2190.317503] env[62684]: DEBUG nova.virt.hardware [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2190.318420] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98a220d5-6c73-4a7f-b276-4d1937dc3376 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.326809] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68477b2f-c874-43e5-9012-c879054f3390 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.377744] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2190.387544] env[62684]: DEBUG oslo_vmware.api [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053518, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2190.578456] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2190.578932] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-60ef8fc3-6dc5-4ac5-8bdb-1c09c1c77b78 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.587833] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2190.587833] env[62684]: value = "task-2053520" [ 2190.587833] env[62684]: _type = "Task" [ 2190.587833] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2190.600466] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] VM already powered off {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2190.604375] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2190.605204] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d613d6ad-bf13-4eb9-8255-3b4bf7ce59a5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.616542] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2190.616868] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-83a0c93f-965d-483a-8140-efa3fb889bb0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.623291] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.372s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2190.623799] env[62684]: DEBUG nova.compute.manager [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2190.626779] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.241s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2190.627074] env[62684]: DEBUG nova.objects.instance [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Lazy-loading 'resources' on Instance uuid 3a967adf-8c46-4787-b1d1-4ed701399576 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2190.707548] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053519, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2190.896554] env[62684]: DEBUG oslo_vmware.api [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053518, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2191.131403] env[62684]: DEBUG nova.compute.utils [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2191.139262] env[62684]: DEBUG nova.compute.manager [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2191.139262] env[62684]: DEBUG nova.network.neutron [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2191.211045] env[62684]: DEBUG oslo_vmware.api [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053519, 'name': PowerOnVM_Task, 'duration_secs': 0.807573} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2191.211360] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2191.255903] env[62684]: DEBUG nova.policy [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e3a532747bda4c7e8aa2892b424a47ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '263c101fcc5e493789b79dfd1ba97cc0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2191.395748] env[62684]: DEBUG nova.compute.manager [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2191.397051] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e803786a-b2a4-4fcc-bc70-015aaebe746f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.406371] env[62684]: DEBUG oslo_vmware.api [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053518, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2191.475152] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf22140-c6fa-492e-809f-7d3c4fd8e15f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.487349] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75df2002-3fe8-4a79-b1e4-009cf5a87015 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.526492] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2724c4ba-2aa0-402e-97a0-7d60c6769176 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.535602] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e27b4e-f19f-4f50-ac12-57cd40a5467c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.549706] env[62684]: DEBUG nova.compute.provider_tree [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2191.641677] env[62684]: DEBUG nova.compute.manager [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2191.763476] env[62684]: DEBUG nova.network.neutron [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Successfully created port: 0d17b2ea-8e17-456b-87e2-1e2bec93f187 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2191.821412] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Acquiring lock "3ff55331-6d5c-4558-b932-e266670f2ac9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2191.821412] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Lock "3ff55331-6d5c-4558-b932-e266670f2ac9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2191.896300] env[62684]: DEBUG oslo_vmware.api [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053518, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2191.924670] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3cdf8a46-21ab-454a-b49f-e2ba539b1684 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "daf1486b-d5c2-4341-8a27-36eeeb08cd26" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 25.976s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2192.054809] env[62684]: DEBUG nova.scheduler.client.report [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2192.324046] env[62684]: DEBUG nova.compute.manager [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2192.392408] env[62684]: DEBUG oslo_vmware.api [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053518, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2192.560742] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.934s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2192.567916] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.420s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2192.568361] env[62684]: INFO nova.compute.claims [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2192.596854] env[62684]: INFO nova.scheduler.client.report [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Deleted allocations for instance 3a967adf-8c46-4787-b1d1-4ed701399576 [ 2192.657947] env[62684]: DEBUG nova.compute.manager [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2192.689600] env[62684]: DEBUG nova.virt.hardware [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2192.689888] env[62684]: DEBUG nova.virt.hardware [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2192.690102] env[62684]: DEBUG nova.virt.hardware [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2192.690309] env[62684]: DEBUG nova.virt.hardware [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2192.690465] env[62684]: DEBUG nova.virt.hardware [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2192.690617] env[62684]: DEBUG nova.virt.hardware [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2192.690833] env[62684]: DEBUG nova.virt.hardware [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2192.691008] env[62684]: DEBUG nova.virt.hardware [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2192.691287] env[62684]: DEBUG nova.virt.hardware [None 
req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2192.691553] env[62684]: DEBUG nova.virt.hardware [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2192.691722] env[62684]: DEBUG nova.virt.hardware [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2192.692893] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bce0c37-48e7-4857-bdec-754ad5976fab {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.702901] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a517516b-3a15-49b0-9ef3-1520aa7522c4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.846348] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2192.846549] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2192.846763] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Deleting the datastore file [datastore2] 99a9653c-7221-4495-be5f-5441dc8da0f4 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2192.847056] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fe11a890-9e0b-4230-91cb-5fa15059db3f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.850501] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2192.857975] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2192.857975] env[62684]: value = 
"task-2053522" [ 2192.857975] env[62684]: _type = "Task" [ 2192.857975] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2192.862043] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6a301a5d-eb03-4600-bdb0-0ca3153101f9 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2192.862306] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6a301a5d-eb03-4600-bdb0-0ca3153101f9 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2192.862497] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a301a5d-eb03-4600-bdb0-0ca3153101f9 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Deleting the datastore file [datastore2] f4fab142-8066-43c1-abaa-a9f66775114c {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2192.863255] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6c2d7c4b-cad9-48c0-b9dc-61a7aadbe623 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.868265] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053522, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2192.872580] env[62684]: DEBUG oslo_vmware.api [None req-6a301a5d-eb03-4600-bdb0-0ca3153101f9 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2192.872580] env[62684]: value = "task-2053523" [ 2192.872580] env[62684]: _type = "Task" [ 2192.872580] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2192.880374] env[62684]: DEBUG oslo_vmware.api [None req-6a301a5d-eb03-4600-bdb0-0ca3153101f9 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053523, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2192.890578] env[62684]: DEBUG oslo_vmware.api [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053518, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2193.050507] env[62684]: DEBUG oslo_concurrency.lockutils [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Acquiring lock "58e67d8e-900e-4d22-a4fd-fe493758d4f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2193.051028] env[62684]: DEBUG oslo_concurrency.lockutils [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Lock "58e67d8e-900e-4d22-a4fd-fe493758d4f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2193.105123] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e5e523ca-4565-4a55-8b68-373620ed36d8 tempest-ServerRescueTestJSON-761077259 tempest-ServerRescueTestJSON-761077259-project-member] Lock "3a967adf-8c46-4787-b1d1-4ed701399576" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.598s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2193.366409] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053522, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14701} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2193.366668] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2193.366909] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2193.367156] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2193.382567] env[62684]: DEBUG oslo_vmware.api [None req-6a301a5d-eb03-4600-bdb0-0ca3153101f9 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053523, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144458} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2193.383251] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a301a5d-eb03-4600-bdb0-0ca3153101f9 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2193.383251] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6a301a5d-eb03-4600-bdb0-0ca3153101f9 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2193.383251] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6a301a5d-eb03-4600-bdb0-0ca3153101f9 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2193.387021] env[62684]: INFO nova.compute.manager [None req-6a301a5d-eb03-4600-bdb0-0ca3153101f9 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Took 4.05 seconds to destroy the instance on the hypervisor. [ 2193.387021] env[62684]: DEBUG oslo.service.loopingcall [None req-6a301a5d-eb03-4600-bdb0-0ca3153101f9 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2193.387021] env[62684]: DEBUG nova.compute.manager [-] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2193.387021] env[62684]: DEBUG nova.network.neutron [-] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2193.394637] env[62684]: DEBUG oslo_vmware.api [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053518, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2193.554348] env[62684]: DEBUG nova.compute.manager [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2193.833074] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-915a3727-b2ab-4b1c-8a4a-e9a7d7402bc7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.841290] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ee6cf5-f194-445e-baac-a5d02c6606e5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.880020] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bce6e01-ead9-46a7-b666-ac63317553df {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.884797] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e93706-3560-4192-ad0e-9b7acc644f1a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.903256] env[62684]: DEBUG nova.compute.provider_tree [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2193.908113] env[62684]: DEBUG oslo_vmware.api [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053518, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2193.948573] env[62684]: DEBUG nova.compute.manager [req-25b0013b-7f57-46d4-a744-78f12b3d733a req-a8346993-aff1-44e9-9982-c7a1e4c9169d service nova] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Received event network-vif-deleted-89a358de-54fa-41bb-ae43-85a7abbb900b {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2193.948573] env[62684]: INFO nova.compute.manager [req-25b0013b-7f57-46d4-a744-78f12b3d733a req-a8346993-aff1-44e9-9982-c7a1e4c9169d service nova] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Neutron deleted interface 89a358de-54fa-41bb-ae43-85a7abbb900b; detaching it from the instance and deleting it from the info cache [ 2193.948722] env[62684]: DEBUG nova.network.neutron [req-25b0013b-7f57-46d4-a744-78f12b3d733a req-a8346993-aff1-44e9-9982-c7a1e4c9169d service nova] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2194.023336] env[62684]: DEBUG nova.compute.manager [req-f8b6ec31-f4e4-4613-9356-5e9607a82a16 req-5e279f41-5ba3-4d7d-9785-72583613a94d service nova] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Received event network-vif-plugged-34ae3a7e-83a9-4ebd-8582-bb73f3050948 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2194.023508] env[62684]: DEBUG oslo_concurrency.lockutils [req-f8b6ec31-f4e4-4613-9356-5e9607a82a16 req-5e279f41-5ba3-4d7d-9785-72583613a94d service nova] Acquiring lock "264c6900-dbef-455e-95cc-1df73c735cc8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2194.023770] env[62684]: DEBUG oslo_concurrency.lockutils [req-f8b6ec31-f4e4-4613-9356-5e9607a82a16 req-5e279f41-5ba3-4d7d-9785-72583613a94d service nova] Lock "264c6900-dbef-455e-95cc-1df73c735cc8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2194.025039] env[62684]: DEBUG oslo_concurrency.lockutils [req-f8b6ec31-f4e4-4613-9356-5e9607a82a16 req-5e279f41-5ba3-4d7d-9785-72583613a94d service nova] Lock "264c6900-dbef-455e-95cc-1df73c735cc8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2194.025039] env[62684]: DEBUG nova.compute.manager [req-f8b6ec31-f4e4-4613-9356-5e9607a82a16 req-5e279f41-5ba3-4d7d-9785-72583613a94d service nova] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] No waiting events found dispatching network-vif-plugged-34ae3a7e-83a9-4ebd-8582-bb73f3050948 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2194.025485] env[62684]: WARNING nova.compute.manager [req-f8b6ec31-f4e4-4613-9356-5e9607a82a16 req-5e279f41-5ba3-4d7d-9785-72583613a94d service nova] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Received unexpected event network-vif-plugged-34ae3a7e-83a9-4ebd-8582-bb73f3050948 for instance with vm_state building and task_state spawning. 
[ 2194.077517] env[62684]: DEBUG oslo_concurrency.lockutils [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2194.163094] env[62684]: DEBUG nova.network.neutron [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Successfully updated port: 34ae3a7e-83a9-4ebd-8582-bb73f3050948 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2194.398971] env[62684]: DEBUG oslo_vmware.api [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053518, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2194.409289] env[62684]: DEBUG nova.virt.hardware [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2194.409289] env[62684]: DEBUG nova.virt.hardware [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2194.409289] env[62684]: DEBUG nova.virt.hardware [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2194.409289] env[62684]: DEBUG nova.virt.hardware [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2194.409289] env[62684]: DEBUG nova.virt.hardware [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2194.409289] env[62684]: DEBUG nova.virt.hardware [None 
req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2194.409289] env[62684]: DEBUG nova.virt.hardware [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2194.409289] env[62684]: DEBUG nova.virt.hardware [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2194.409289] env[62684]: DEBUG nova.virt.hardware [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2194.409625] env[62684]: DEBUG nova.virt.hardware [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2194.409625] env[62684]: DEBUG nova.virt.hardware [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2194.413161] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a74e0b9-f124-4255-b359-91853b10ad5e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.415941] env[62684]: DEBUG nova.network.neutron [-] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2194.422924] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a115536a-d5d1-4de9-827d-ad1979816e0e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.440141] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:9c:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '171aeae0-6a27-44fc-bc3d-a2d5581fc702', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fe223d4f-0585-454f-b724-0cdff1d2ceea', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2194.448767] 
env[62684]: DEBUG oslo.service.loopingcall [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2194.450022] env[62684]: ERROR nova.scheduler.client.report [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [req-f2a91802-effd-477f-a08e-351febae3dae] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f2a91802-effd-477f-a08e-351febae3dae"}]} [ 2194.450748] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2194.453666] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-30aaba0b-ef9c-484f-a471-5c020ba381be {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.469275] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e2df09f7-1e7a-43b4-9b26-0cac81f32dc5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.479533] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2194.479533] env[62684]: value = "task-2053524" [ 2194.479533] env[62684]: _type = "Task" [ 2194.479533] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2194.483117] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a929dbe1-a5a9-4741-9147-22012845a32e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.498996] env[62684]: DEBUG nova.scheduler.client.report [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2194.509917] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053524, 'name': CreateVM_Task} progress is 10%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2194.525549] env[62684]: DEBUG nova.compute.manager [req-25b0013b-7f57-46d4-a744-78f12b3d733a req-a8346993-aff1-44e9-9982-c7a1e4c9169d service nova] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Detach interface failed, port_id=89a358de-54fa-41bb-ae43-85a7abbb900b, reason: Instance f4fab142-8066-43c1-abaa-a9f66775114c could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2194.526918] env[62684]: DEBUG nova.scheduler.client.report [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2194.527216] env[62684]: DEBUG nova.compute.provider_tree [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2194.546984] env[62684]: DEBUG nova.scheduler.client.report [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2194.580536] env[62684]: DEBUG nova.scheduler.client.report [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2194.644834] env[62684]: DEBUG nova.network.neutron [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Successfully updated port: 0d17b2ea-8e17-456b-87e2-1e2bec93f187 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2194.665484] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 
tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "refresh_cache-264c6900-dbef-455e-95cc-1df73c735cc8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2194.665623] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired lock "refresh_cache-264c6900-dbef-455e-95cc-1df73c735cc8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2194.665775] env[62684]: DEBUG nova.network.neutron [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2194.805807] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1d9114-05cd-48a5-8fce-5a815631a934 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.815357] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b145945-8a0f-4b8c-9eba-9cd213cc7cf0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.845224] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7034d9-77a8-45f0-9bd8-5548a107fbd5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.852671] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d4ba68-dafd-467d-9f9c-9d66a84b398d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.865595] env[62684]: DEBUG nova.compute.provider_tree [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2194.899845] env[62684]: DEBUG oslo_vmware.api [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053518, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2194.922802] env[62684]: INFO nova.compute.manager [-] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Took 1.54 seconds to deallocate network for instance. [ 2195.004761] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053524, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2195.146766] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "refresh_cache-ba12fa9a-10e3-4624-98b5-4ff7365e1940" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2195.146935] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired lock "refresh_cache-ba12fa9a-10e3-4624-98b5-4ff7365e1940" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2195.147114] env[62684]: DEBUG nova.network.neutron [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2195.225366] env[62684]: DEBUG nova.network.neutron [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2195.395780] env[62684]: DEBUG nova.network.neutron [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Updating instance_info_cache with network_info: [{"id": "34ae3a7e-83a9-4ebd-8582-bb73f3050948", "address": "fa:16:3e:fc:ef:f4", "network": {"id": "7678b347-6a54-4b84-9a4d-b566bbeb1ea4", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-51664912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d88ac878d44480b3b54b24ab87efa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34ae3a7e-83", "ovs_interfaceid": "34ae3a7e-83a9-4ebd-8582-bb73f3050948", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2195.402856] env[62684]: DEBUG oslo_vmware.api [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053518, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2195.403912] env[62684]: DEBUG nova.scheduler.client.report [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 150 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2195.404103] env[62684]: DEBUG nova.compute.provider_tree [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 150 to 151 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2195.404396] env[62684]: DEBUG nova.compute.provider_tree [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2195.428698] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6a301a5d-eb03-4600-bdb0-0ca3153101f9 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2195.506673] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053524, 'name': CreateVM_Task, 'duration_secs': 0.668713} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2195.506863] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2195.507567] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2195.507771] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2195.508132] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2195.508399] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8738dfe8-7012-4abf-b899-74091a57582a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.513324] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2195.513324] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ec7266-00ad-40a3-d08e-c959ecf709fc" [ 2195.513324] env[62684]: _type = "Task" [ 2195.513324] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2195.520997] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ec7266-00ad-40a3-d08e-c959ecf709fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2195.704502] env[62684]: DEBUG nova.network.neutron [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2195.851722] env[62684]: DEBUG nova.network.neutron [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Updating instance_info_cache with network_info: [{"id": "0d17b2ea-8e17-456b-87e2-1e2bec93f187", "address": "fa:16:3e:1c:c6:37", "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-120070593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "263c101fcc5e493789b79dfd1ba97cc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d17b2ea-8e", "ovs_interfaceid": "0d17b2ea-8e17-456b-87e2-1e2bec93f187", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2195.899340] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Releasing lock "refresh_cache-264c6900-dbef-455e-95cc-1df73c735cc8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2195.899659] env[62684]: DEBUG nova.compute.manager [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Instance network_info: |[{"id": "34ae3a7e-83a9-4ebd-8582-bb73f3050948", "address": "fa:16:3e:fc:ef:f4", "network": {"id": "7678b347-6a54-4b84-9a4d-b566bbeb1ea4", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-51664912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d88ac878d44480b3b54b24ab87efa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34ae3a7e-83", "ovs_interfaceid": "34ae3a7e-83a9-4ebd-8582-bb73f3050948", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 2195.899963] env[62684]: DEBUG oslo_vmware.api [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053518, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2195.900343] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:ef:f4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'de5fcb06-b0d0-467f-86fe-06882165ac31', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '34ae3a7e-83a9-4ebd-8582-bb73f3050948', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2195.907527] env[62684]: DEBUG oslo.service.loopingcall [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2195.908035] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2195.908708] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.342s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2195.909219] env[62684]: DEBUG nova.compute.manager [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2195.911889] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-40c8e72a-0044-43d1-8d99-9e0b5716a612 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.927222] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 5.550s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2195.927408] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2195.927565] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2195.927866] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.077s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2195.929588] env[62684]: INFO nova.compute.claims [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2195.933253] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-477dee30-7a41-4e85-9d3c-b786d63805f7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.943232] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ab61756-cfe5-46a3-8f8b-da8c588a619d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.948371] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2195.948371] env[62684]: value = "task-2053525" [ 2195.948371] env[62684]: _type = "Task" [ 2195.948371] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2195.962522] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f56a5d-b417-498f-a7fc-74093fac6de6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.968869] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053525, 'name': CreateVM_Task} progress is 10%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2195.975156] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f82b53d-a32d-4707-a337-4f2b5a7f8963 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.979469] env[62684]: DEBUG nova.compute.manager [req-449f25c4-be70-40d7-97d9-2f4908412aef req-75c93dc6-d680-40e7-8856-71987473a310 service nova] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Received event network-vif-plugged-0d17b2ea-8e17-456b-87e2-1e2bec93f187 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2195.979723] env[62684]: DEBUG oslo_concurrency.lockutils [req-449f25c4-be70-40d7-97d9-2f4908412aef req-75c93dc6-d680-40e7-8856-71987473a310 service nova] Acquiring lock "ba12fa9a-10e3-4624-98b5-4ff7365e1940-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2195.980208] env[62684]: DEBUG oslo_concurrency.lockutils [req-449f25c4-be70-40d7-97d9-2f4908412aef req-75c93dc6-d680-40e7-8856-71987473a310 service nova] Lock "ba12fa9a-10e3-4624-98b5-4ff7365e1940-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2195.980404] env[62684]: DEBUG oslo_concurrency.lockutils [req-449f25c4-be70-40d7-97d9-2f4908412aef req-75c93dc6-d680-40e7-8856-71987473a310 service nova] Lock "ba12fa9a-10e3-4624-98b5-4ff7365e1940-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2195.980706] env[62684]: DEBUG nova.compute.manager [req-449f25c4-be70-40d7-97d9-2f4908412aef req-75c93dc6-d680-40e7-8856-71987473a310 service nova] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] No waiting events found dispatching network-vif-plugged-0d17b2ea-8e17-456b-87e2-1e2bec93f187 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2195.980802] env[62684]: WARNING nova.compute.manager [req-449f25c4-be70-40d7-97d9-2f4908412aef req-75c93dc6-d680-40e7-8856-71987473a310 service nova] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Received unexpected event network-vif-plugged-0d17b2ea-8e17-456b-87e2-1e2bec93f187 for instance with vm_state building and task_state spawning. [ 2195.980984] env[62684]: DEBUG nova.compute.manager [req-449f25c4-be70-40d7-97d9-2f4908412aef req-75c93dc6-d680-40e7-8856-71987473a310 service nova] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Received event network-changed-0d17b2ea-8e17-456b-87e2-1e2bec93f187 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2195.981165] env[62684]: DEBUG nova.compute.manager [req-449f25c4-be70-40d7-97d9-2f4908412aef req-75c93dc6-d680-40e7-8856-71987473a310 service nova] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Refreshing instance network info cache due to event network-changed-0d17b2ea-8e17-456b-87e2-1e2bec93f187. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2195.981366] env[62684]: DEBUG oslo_concurrency.lockutils [req-449f25c4-be70-40d7-97d9-2f4908412aef req-75c93dc6-d680-40e7-8856-71987473a310 service nova] Acquiring lock "refresh_cache-ba12fa9a-10e3-4624-98b5-4ff7365e1940" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2196.010796] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179290MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2196.010980] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2196.023175] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ec7266-00ad-40a3-d08e-c959ecf709fc, 'name': SearchDatastore_Task, 'duration_secs': 0.010921} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2196.023474] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2196.024098] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2196.024098] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2196.024098] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2196.024277] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Creating directory with path [datastore2] 
devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2196.024513] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2cb942c-c446-4637-a4f7-7c1cdbb763e1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2196.032814] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2196.033030] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2196.033737] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06b04d41-63c3-47f2-a76a-72745423a94f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2196.039341] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2196.039341] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52cca4bc-3204-c944-c52c-87ad5f85cd1f" [ 2196.039341] env[62684]: _type = "Task" [ 2196.039341] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2196.046726] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52cca4bc-3204-c944-c52c-87ad5f85cd1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2196.049985] env[62684]: DEBUG nova.compute.manager [req-981a6c94-cd85-4b08-a4b4-6aff17c20323 req-f2b8a5e1-ed40-4e63-aa39-6be9a6bab33e service nova] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Received event network-changed-34ae3a7e-83a9-4ebd-8582-bb73f3050948 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2196.050187] env[62684]: DEBUG nova.compute.manager [req-981a6c94-cd85-4b08-a4b4-6aff17c20323 req-f2b8a5e1-ed40-4e63-aa39-6be9a6bab33e service nova] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Refreshing instance network info cache due to event network-changed-34ae3a7e-83a9-4ebd-8582-bb73f3050948. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2196.050468] env[62684]: DEBUG oslo_concurrency.lockutils [req-981a6c94-cd85-4b08-a4b4-6aff17c20323 req-f2b8a5e1-ed40-4e63-aa39-6be9a6bab33e service nova] Acquiring lock "refresh_cache-264c6900-dbef-455e-95cc-1df73c735cc8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2196.050621] env[62684]: DEBUG oslo_concurrency.lockutils [req-981a6c94-cd85-4b08-a4b4-6aff17c20323 req-f2b8a5e1-ed40-4e63-aa39-6be9a6bab33e service nova] Acquired lock "refresh_cache-264c6900-dbef-455e-95cc-1df73c735cc8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2196.050836] env[62684]: DEBUG nova.network.neutron [req-981a6c94-cd85-4b08-a4b4-6aff17c20323 req-f2b8a5e1-ed40-4e63-aa39-6be9a6bab33e service nova] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Refreshing network info cache for port 34ae3a7e-83a9-4ebd-8582-bb73f3050948 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2196.354989] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Releasing lock "refresh_cache-ba12fa9a-10e3-4624-98b5-4ff7365e1940" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2196.355381] env[62684]: DEBUG nova.compute.manager [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Instance network_info: |[{"id": "0d17b2ea-8e17-456b-87e2-1e2bec93f187", "address": "fa:16:3e:1c:c6:37", "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-120070593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "263c101fcc5e493789b79dfd1ba97cc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d17b2ea-8e", "ovs_interfaceid": "0d17b2ea-8e17-456b-87e2-1e2bec93f187", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2196.355711] env[62684]: DEBUG oslo_concurrency.lockutils [req-449f25c4-be70-40d7-97d9-2f4908412aef req-75c93dc6-d680-40e7-8856-71987473a310 service nova] Acquired lock "refresh_cache-ba12fa9a-10e3-4624-98b5-4ff7365e1940" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2196.355898] env[62684]: DEBUG nova.network.neutron [req-449f25c4-be70-40d7-97d9-2f4908412aef req-75c93dc6-d680-40e7-8856-71987473a310 service nova] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] 
Refreshing network info cache for port 0d17b2ea-8e17-456b-87e2-1e2bec93f187 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2196.357466] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1c:c6:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92e4d027-e755-417b-8eea-9a8f24b85140', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0d17b2ea-8e17-456b-87e2-1e2bec93f187', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2196.365222] env[62684]: DEBUG oslo.service.loopingcall [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2196.368134] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2196.368642] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-54a25389-3519-4747-ba09-33fb009eac42 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2196.389712] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2196.389712] env[62684]: value = "task-2053526" [ 2196.389712] env[62684]: _type = "Task" [ 2196.389712] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2196.400201] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053526, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2196.403165] env[62684]: DEBUG oslo_vmware.api [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053518, 'name': ReconfigVM_Task, 'duration_secs': 6.043399} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2196.403446] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2196.403687] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Reconfigured VM to detach interface {{(pid=62684) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 2196.429113] env[62684]: DEBUG nova.compute.utils [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2196.430590] env[62684]: DEBUG nova.compute.manager [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2196.431711] env[62684]: DEBUG nova.network.neutron [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2196.460619] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053525, 'name': CreateVM_Task, 'duration_secs': 0.41503} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2196.460785] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2196.461500] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2196.461668] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2196.462188] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2196.464190] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2eb56c91-a8e6-4ea1-b31b-4b25c757c158 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2196.469290] env[62684]: DEBUG oslo_vmware.api [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2196.469290] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52262b60-0010-d407-1f50-e361f1130a5e" [ 2196.469290] env[62684]: _type = "Task" [ 2196.469290] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2196.479559] env[62684]: DEBUG oslo_vmware.api [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52262b60-0010-d407-1f50-e361f1130a5e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2196.501733] env[62684]: DEBUG nova.policy [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '96b96927115d49f2a04342784717e58e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '607a0aa1049640d882d7dd490f5f98ea', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2196.550525] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52cca4bc-3204-c944-c52c-87ad5f85cd1f, 'name': SearchDatastore_Task, 'duration_secs': 0.008283} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2196.551581] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f73e9b10-f520-4392-8df7-993e9dc275c3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2196.559604] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2196.559604] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bdd0b3-e180-f5e8-232b-51b99dadbb7b" [ 2196.559604] env[62684]: _type = "Task" [ 2196.559604] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2196.568298] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bdd0b3-e180-f5e8-232b-51b99dadbb7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2196.723659] env[62684]: DEBUG nova.network.neutron [req-449f25c4-be70-40d7-97d9-2f4908412aef req-75c93dc6-d680-40e7-8856-71987473a310 service nova] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Updated VIF entry in instance network info cache for port 0d17b2ea-8e17-456b-87e2-1e2bec93f187. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2196.724013] env[62684]: DEBUG nova.network.neutron [req-449f25c4-be70-40d7-97d9-2f4908412aef req-75c93dc6-d680-40e7-8856-71987473a310 service nova] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Updating instance_info_cache with network_info: [{"id": "0d17b2ea-8e17-456b-87e2-1e2bec93f187", "address": "fa:16:3e:1c:c6:37", "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-120070593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "263c101fcc5e493789b79dfd1ba97cc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d17b2ea-8e", "ovs_interfaceid": "0d17b2ea-8e17-456b-87e2-1e2bec93f187", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2196.888719] env[62684]: DEBUG nova.network.neutron [req-981a6c94-cd85-4b08-a4b4-6aff17c20323 req-f2b8a5e1-ed40-4e63-aa39-6be9a6bab33e service nova] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Updated VIF entry in instance network info cache for port 34ae3a7e-83a9-4ebd-8582-bb73f3050948. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2196.889207] env[62684]: DEBUG nova.network.neutron [req-981a6c94-cd85-4b08-a4b4-6aff17c20323 req-f2b8a5e1-ed40-4e63-aa39-6be9a6bab33e service nova] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Updating instance_info_cache with network_info: [{"id": "34ae3a7e-83a9-4ebd-8582-bb73f3050948", "address": "fa:16:3e:fc:ef:f4", "network": {"id": "7678b347-6a54-4b84-9a4d-b566bbeb1ea4", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-51664912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d88ac878d44480b3b54b24ab87efa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34ae3a7e-83", "ovs_interfaceid": "34ae3a7e-83a9-4ebd-8582-bb73f3050948", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2196.904681] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053526, 'name': CreateVM_Task, 'duration_secs': 0.299496} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2196.904791] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2196.905480] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2196.936608] env[62684]: DEBUG nova.compute.manager [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2196.983830] env[62684]: DEBUG oslo_vmware.api [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52262b60-0010-d407-1f50-e361f1130a5e, 'name': SearchDatastore_Task, 'duration_secs': 0.009617} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2196.983830] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2196.983830] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2196.983830] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2196.984129] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2196.988066] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2196.988066] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2196.988066] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2196.988066] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3908fdb1-9554-413f-9708-68370222b0b0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2196.990636] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e39abcc-d02b-443e-98cb-b57627eaa988 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2196.996231] env[62684]: DEBUG 
oslo_vmware.api [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2196.996231] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524c32d4-acfd-f9d3-c05c-3a7f4c4c0396" [ 2196.996231] env[62684]: _type = "Task" [ 2196.996231] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2196.999685] env[62684]: DEBUG nova.network.neutron [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Successfully created port: 3f61101e-05cd-4c60-ad9a-8a272d5e8879 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2197.002536] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2197.002723] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2197.006186] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70307ab2-45f8-4294-aad5-9ed06cc44d94 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.012303] env[62684]: DEBUG oslo_vmware.api [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524c32d4-acfd-f9d3-c05c-3a7f4c4c0396, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2197.016280] env[62684]: DEBUG oslo_vmware.api [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2197.016280] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ca24c2-bb6a-4bae-cd12-f7a0bb3c740f" [ 2197.016280] env[62684]: _type = "Task" [ 2197.016280] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2197.023702] env[62684]: DEBUG oslo_vmware.api [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ca24c2-bb6a-4bae-cd12-f7a0bb3c740f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2197.071408] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bdd0b3-e180-f5e8-232b-51b99dadbb7b, 'name': SearchDatastore_Task, 'duration_secs': 0.009403} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2197.071681] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2197.071953] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 99a9653c-7221-4495-be5f-5441dc8da0f4/99a9653c-7221-4495-be5f-5441dc8da0f4.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2197.072234] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9e1a2a64-e769-4dda-a35a-0970a3f2b21f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.080285] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2197.080285] env[62684]: value = "task-2053527" [ 2197.080285] env[62684]: _type = "Task" [ 2197.080285] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2197.088016] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053527, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2197.209577] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e9597f1-f95d-45b5-a8c6-3acba45d78aa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.217782] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42cbbd54-d8c8-47c9-ad77-7218a9e79fa4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.249083] env[62684]: DEBUG oslo_concurrency.lockutils [req-449f25c4-be70-40d7-97d9-2f4908412aef req-75c93dc6-d680-40e7-8856-71987473a310 service nova] Releasing lock "refresh_cache-ba12fa9a-10e3-4624-98b5-4ff7365e1940" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2197.250395] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415bb8f1-4ca1-4d1f-ae7c-7710af357825 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.258512] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e723a7a5-09cb-467c-8dc1-eed9469f4843 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.275726] env[62684]: DEBUG nova.compute.provider_tree [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2197.398772] env[62684]: DEBUG oslo_concurrency.lockutils [req-981a6c94-cd85-4b08-a4b4-6aff17c20323 req-f2b8a5e1-ed40-4e63-aa39-6be9a6bab33e service nova] Releasing lock "refresh_cache-264c6900-dbef-455e-95cc-1df73c735cc8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2197.507559] env[62684]: DEBUG oslo_vmware.api [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524c32d4-acfd-f9d3-c05c-3a7f4c4c0396, 'name': SearchDatastore_Task, 'duration_secs': 0.018017} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2197.507751] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2197.507971] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2197.508196] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2197.524574] env[62684]: DEBUG oslo_vmware.api [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ca24c2-bb6a-4bae-cd12-f7a0bb3c740f, 'name': SearchDatastore_Task, 'duration_secs': 0.008015} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2197.525317] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5405b147-1b4f-4433-8854-4cdb13f9a3c7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.531758] env[62684]: DEBUG oslo_vmware.api [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2197.531758] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c2bdca-141e-6c61-15d1-df45e73a1fd2" [ 2197.531758] env[62684]: _type = "Task" [ 2197.531758] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2197.539690] env[62684]: DEBUG oslo_vmware.api [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c2bdca-141e-6c61-15d1-df45e73a1fd2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2197.590014] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053527, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.440982} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2197.590275] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 99a9653c-7221-4495-be5f-5441dc8da0f4/99a9653c-7221-4495-be5f-5441dc8da0f4.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2197.590488] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2197.590728] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e2008bff-8ed5-485b-8917-26445c66b348 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.597109] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2197.597109] env[62684]: value = "task-2053528" [ 2197.597109] env[62684]: _type = "Task" [ 2197.597109] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2197.604954] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053528, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2197.779490] env[62684]: DEBUG nova.scheduler.client.report [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2197.838240] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "refresh_cache-f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2197.838448] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "refresh_cache-f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2197.838630] env[62684]: DEBUG nova.network.neutron [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2197.946364] env[62684]: DEBUG nova.compute.manager [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2197.979150] env[62684]: DEBUG nova.virt.hardware [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2197.979432] env[62684]: DEBUG nova.virt.hardware [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2197.979596] env[62684]: DEBUG nova.virt.hardware [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2197.979788] env[62684]: DEBUG nova.virt.hardware [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2197.979955] env[62684]: DEBUG nova.virt.hardware [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2197.980119] env[62684]: DEBUG nova.virt.hardware [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2197.980335] env[62684]: DEBUG nova.virt.hardware [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2197.980500] env[62684]: DEBUG nova.virt.hardware [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2197.980670] env[62684]: DEBUG nova.virt.hardware [None 
req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2197.980847] env[62684]: DEBUG nova.virt.hardware [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2197.981043] env[62684]: DEBUG nova.virt.hardware [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2197.982213] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f318a283-6da9-485c-8761-ee492138c7b5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.990645] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bce7f8e-dd95-4c52-9e85-bde02defe1eb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.041785] env[62684]: DEBUG oslo_vmware.api [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c2bdca-141e-6c61-15d1-df45e73a1fd2, 'name': SearchDatastore_Task, 'duration_secs': 0.009504} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2198.042153] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2198.042670] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 264c6900-dbef-455e-95cc-1df73c735cc8/264c6900-dbef-455e-95cc-1df73c735cc8.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2198.042846] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2198.043189] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2198.044994] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9a6852b8-eac1-4f51-a2d9-44c138bc1114 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.046266] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-45ad0c43-49d2-4c97-a2f4-afe0d2dc1e16 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.052390] env[62684]: DEBUG oslo_vmware.api [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2198.052390] env[62684]: value = "task-2053529" [ 2198.052390] env[62684]: _type = "Task" [ 2198.052390] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2198.056484] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2198.056722] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2198.057775] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8102b7b2-1188-414f-b5d4-b4c51b76abc6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.065535] env[62684]: DEBUG oslo_vmware.api [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053529, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2198.068323] env[62684]: DEBUG oslo_vmware.api [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2198.068323] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523650fe-1d67-d36d-06b5-5739698c39b0" [ 2198.068323] env[62684]: _type = "Task" [ 2198.068323] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2198.077769] env[62684]: DEBUG oslo_vmware.api [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523650fe-1d67-d36d-06b5-5739698c39b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2198.106931] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053528, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059592} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2198.107260] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2198.108045] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-609034cc-d4fe-4a01-94c8-1fc10cb7af8c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.132946] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] 99a9653c-7221-4495-be5f-5441dc8da0f4/99a9653c-7221-4495-be5f-5441dc8da0f4.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2198.133103] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31705f42-252e-4e69-ab35-39760a0e4580 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.154094] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2198.154094] env[62684]: value = "task-2053530" [ 2198.154094] env[62684]: _type = "Task" [ 2198.154094] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2198.163547] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053530, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2198.166879] env[62684]: DEBUG nova.compute.manager [req-a8eb5917-712d-4c96-8b10-41423e60cb40 req-1d6047b8-37a9-4583-8192-b912733a268a service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Received event network-changed-b5747949-00d7-4815-9080-52285a6a8813 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2198.166879] env[62684]: DEBUG nova.compute.manager [req-a8eb5917-712d-4c96-8b10-41423e60cb40 req-1d6047b8-37a9-4583-8192-b912733a268a service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Refreshing instance network info cache due to event network-changed-b5747949-00d7-4815-9080-52285a6a8813. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2198.166879] env[62684]: DEBUG oslo_concurrency.lockutils [req-a8eb5917-712d-4c96-8b10-41423e60cb40 req-1d6047b8-37a9-4583-8192-b912733a268a service nova] Acquiring lock "refresh_cache-f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2198.284613] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.357s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2198.285203] env[62684]: DEBUG nova.compute.manager [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2198.288553] env[62684]: DEBUG oslo_concurrency.lockutils [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.211s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2198.290124] env[62684]: INFO nova.compute.claims [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2198.563156] env[62684]: DEBUG oslo_vmware.api [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053529, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.455793} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2198.563567] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 264c6900-dbef-455e-95cc-1df73c735cc8/264c6900-dbef-455e-95cc-1df73c735cc8.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2198.563972] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2198.564105] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-627e6b4e-46e6-467b-af34-261da6cd4587 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.575339] env[62684]: INFO nova.network.neutron [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Port e60b8515-c469-46d3-945b-bc843ccffc44 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 2198.575698] env[62684]: DEBUG nova.network.neutron [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Updating instance_info_cache with network_info: [{"id": "b5747949-00d7-4815-9080-52285a6a8813", "address": "fa:16:3e:fd:34:0c", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5747949-00", "ovs_interfaceid": "b5747949-00d7-4815-9080-52285a6a8813", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2198.578313] env[62684]: DEBUG oslo_vmware.api [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2198.578313] env[62684]: value = 
"task-2053531" [ 2198.578313] env[62684]: _type = "Task" [ 2198.578313] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2198.600236] env[62684]: DEBUG oslo_vmware.api [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523650fe-1d67-d36d-06b5-5739698c39b0, 'name': SearchDatastore_Task, 'duration_secs': 0.016335} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2198.605273] env[62684]: DEBUG oslo_vmware.api [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053531, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2198.606748] env[62684]: DEBUG nova.compute.manager [req-0662c938-ac85-4705-87fb-5338362cad33 req-759e4b3d-7366-44dc-9fc1-9bb4289fa2dc service nova] [instance: 02480039-f749-402a-92db-df664304a5bf] Received event network-vif-plugged-3f61101e-05cd-4c60-ad9a-8a272d5e8879 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2198.607010] env[62684]: DEBUG oslo_concurrency.lockutils [req-0662c938-ac85-4705-87fb-5338362cad33 req-759e4b3d-7366-44dc-9fc1-9bb4289fa2dc service nova] Acquiring lock "02480039-f749-402a-92db-df664304a5bf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2198.607266] env[62684]: DEBUG oslo_concurrency.lockutils [req-0662c938-ac85-4705-87fb-5338362cad33 req-759e4b3d-7366-44dc-9fc1-9bb4289fa2dc service nova] Lock "02480039-f749-402a-92db-df664304a5bf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2198.607461] env[62684]: DEBUG oslo_concurrency.lockutils [req-0662c938-ac85-4705-87fb-5338362cad33 req-759e4b3d-7366-44dc-9fc1-9bb4289fa2dc service nova] Lock "02480039-f749-402a-92db-df664304a5bf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2198.607663] env[62684]: DEBUG nova.compute.manager [req-0662c938-ac85-4705-87fb-5338362cad33 req-759e4b3d-7366-44dc-9fc1-9bb4289fa2dc service nova] [instance: 02480039-f749-402a-92db-df664304a5bf] No waiting events found dispatching network-vif-plugged-3f61101e-05cd-4c60-ad9a-8a272d5e8879 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2198.607856] env[62684]: WARNING nova.compute.manager [req-0662c938-ac85-4705-87fb-5338362cad33 req-759e4b3d-7366-44dc-9fc1-9bb4289fa2dc service nova] [instance: 02480039-f749-402a-92db-df664304a5bf] Received unexpected event network-vif-plugged-3f61101e-05cd-4c60-ad9a-8a272d5e8879 for instance with vm_state building and task_state spawning. 
[ 2198.608182] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2324afe2-e42b-471b-b6e8-b6ed78100a5c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.614687] env[62684]: DEBUG oslo_vmware.api [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2198.614687] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523e407c-d22a-ce61-2923-9a38115113f0" [ 2198.614687] env[62684]: _type = "Task" [ 2198.614687] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2198.624036] env[62684]: DEBUG oslo_vmware.api [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523e407c-d22a-ce61-2923-9a38115113f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2198.666778] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053530, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2198.680392] env[62684]: DEBUG nova.network.neutron [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Successfully updated port: 3f61101e-05cd-4c60-ad9a-8a272d5e8879 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2198.795196] env[62684]: DEBUG nova.compute.utils [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2198.799031] env[62684]: DEBUG nova.compute.manager [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2198.799206] env[62684]: DEBUG nova.network.neutron [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2198.839665] env[62684]: DEBUG nova.policy [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bbc73652c6414c0484229c420ae77f14', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a21c5c0324ff44789b5722eaea3baf1d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2198.841815] env[62684]: DEBUG oslo_concurrency.lockutils [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "interface-d7f09d0e-f7b6-415e-8d82-47eba1153aa1-e60b8515-c469-46d3-945b-bc843ccffc44" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2198.842083] env[62684]: DEBUG oslo_concurrency.lockutils [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "interface-d7f09d0e-f7b6-415e-8d82-47eba1153aa1-e60b8515-c469-46d3-945b-bc843ccffc44" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2198.842468] env[62684]: DEBUG nova.objects.instance [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lazy-loading 'flavor' on Instance uuid d7f09d0e-f7b6-415e-8d82-47eba1153aa1 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2199.079439] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "refresh_cache-f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2199.081751] env[62684]: DEBUG oslo_concurrency.lockutils [req-a8eb5917-712d-4c96-8b10-41423e60cb40 req-1d6047b8-37a9-4583-8192-b912733a268a service nova] Acquired lock "refresh_cache-f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2199.081961] env[62684]: DEBUG nova.network.neutron [req-a8eb5917-712d-4c96-8b10-41423e60cb40 req-1d6047b8-37a9-4583-8192-b912733a268a service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Refreshing network info cache for port b5747949-00d7-4815-9080-52285a6a8813 {{(pid=62684) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2064}} [ 2199.103786] env[62684]: DEBUG oslo_vmware.api [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053531, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.101339} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2199.104802] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2199.104802] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9b6baba-2e5b-42a9-8bd5-fbd9121e4ebc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.127352] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 264c6900-dbef-455e-95cc-1df73c735cc8/264c6900-dbef-455e-95cc-1df73c735cc8.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2199.128234] env[62684]: DEBUG nova.network.neutron [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Successfully created port: d95acbd4-4387-4053-9921-da1c96923d5c {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2199.133810] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c39a72d6-6ced-4c0e-8a49-637a67dcee08 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.154436] env[62684]: DEBUG oslo_vmware.api [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523e407c-d22a-ce61-2923-9a38115113f0, 'name': SearchDatastore_Task, 'duration_secs': 0.009051} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2199.155836] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2199.156034] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] ba12fa9a-10e3-4624-98b5-4ff7365e1940/ba12fa9a-10e3-4624-98b5-4ff7365e1940.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2199.156478] env[62684]: DEBUG oslo_vmware.api [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2199.156478] env[62684]: value = "task-2053532" [ 2199.156478] env[62684]: _type = "Task" [ 2199.156478] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2199.156962] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6505313b-aee5-466c-860b-15172a942aab {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.169454] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053530, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2199.173599] env[62684]: DEBUG oslo_vmware.api [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053532, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2199.173934] env[62684]: DEBUG oslo_vmware.api [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2199.173934] env[62684]: value = "task-2053533" [ 2199.173934] env[62684]: _type = "Task" [ 2199.173934] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2199.182215] env[62684]: DEBUG oslo_vmware.api [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053533, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2199.183146] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "refresh_cache-02480039-f749-402a-92db-df664304a5bf" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2199.183146] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquired lock "refresh_cache-02480039-f749-402a-92db-df664304a5bf" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2199.183146] env[62684]: DEBUG nova.network.neutron [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2199.299844] env[62684]: DEBUG nova.compute.manager [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2199.468909] env[62684]: DEBUG nova.objects.instance [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lazy-loading 'pci_requests' on Instance uuid d7f09d0e-f7b6-415e-8d82-47eba1153aa1 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2199.577738] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-688a6ee9-8165-4b1a-9307-8ffe1016235d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.586072] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e9b18db-0ded-45ae-be20-e366e7caea50 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.592099] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ce8467d-0c2c-4d62-9cff-69775e7beb75 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "interface-f037d6b2-2082-4611-985e-b9a077eb8250-e60b8515-c469-46d3-945b-bc843ccffc44" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.288s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2199.637951] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34495060-024a-4912-97a0-ffb34ef8f971 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.648684] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed6d677-71b5-43cd-9a0b-b0ec9ba4b960 {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.665856] env[62684]: DEBUG nova.compute.provider_tree [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2199.684329] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053530, 'name': ReconfigVM_Task, 'duration_secs': 1.320162} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2199.685529] env[62684]: DEBUG oslo_vmware.api [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053532, 'name': ReconfigVM_Task, 'duration_secs': 0.503465} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2199.686252] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Reconfigured VM instance instance-0000005f to attach disk [datastore2] 99a9653c-7221-4495-be5f-5441dc8da0f4/99a9653c-7221-4495-be5f-5441dc8da0f4.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2199.688783] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 264c6900-dbef-455e-95cc-1df73c735cc8/264c6900-dbef-455e-95cc-1df73c735cc8.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2199.692330] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fd3010aa-7157-4e04-a639-de6ef7b9f150 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.694113] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-525770ee-9ce8-4d37-870a-cfb6ec9cc278 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.695589] env[62684]: DEBUG oslo_vmware.api [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053533, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.484313} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2199.701018] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] ba12fa9a-10e3-4624-98b5-4ff7365e1940/ba12fa9a-10e3-4624-98b5-4ff7365e1940.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2199.701018] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2199.701018] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d399987e-899c-4328-89f2-00998bf51632 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.703769] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2199.703769] env[62684]: value = "task-2053534" [ 2199.703769] env[62684]: _type = "Task" [ 2199.703769] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2199.704027] env[62684]: DEBUG oslo_vmware.api [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2199.704027] env[62684]: value = "task-2053535" [ 2199.704027] env[62684]: _type = "Task" [ 2199.704027] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2199.711053] env[62684]: DEBUG oslo_vmware.api [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2199.711053] env[62684]: value = "task-2053536" [ 2199.711053] env[62684]: _type = "Task" [ 2199.711053] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2199.720416] env[62684]: DEBUG oslo_vmware.api [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053535, 'name': Rename_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2199.720658] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053534, 'name': Rename_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2199.726249] env[62684]: DEBUG oslo_vmware.api [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053536, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2199.755482] env[62684]: DEBUG nova.network.neutron [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2199.965760] env[62684]: DEBUG nova.network.neutron [req-a8eb5917-712d-4c96-8b10-41423e60cb40 req-1d6047b8-37a9-4583-8192-b912733a268a service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Updated VIF entry in instance network info cache for port b5747949-00d7-4815-9080-52285a6a8813. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2199.966153] env[62684]: DEBUG nova.network.neutron [req-a8eb5917-712d-4c96-8b10-41423e60cb40 req-1d6047b8-37a9-4583-8192-b912733a268a service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Updating instance_info_cache with network_info: [{"id": "b5747949-00d7-4815-9080-52285a6a8813", "address": "fa:16:3e:fd:34:0c", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5747949-00", "ovs_interfaceid": "b5747949-00d7-4815-9080-52285a6a8813", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2199.971114] env[62684]: DEBUG nova.objects.base [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62684) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2199.971453] env[62684]: DEBUG nova.network.neutron [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2199.999479] env[62684]: DEBUG nova.network.neutron [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 
tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Updating instance_info_cache with network_info: [{"id": "3f61101e-05cd-4c60-ad9a-8a272d5e8879", "address": "fa:16:3e:f8:fd:88", "network": {"id": "b24dd0c0-a394-4ca6-a79a-94535bc1df6f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2023102141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "607a0aa1049640d882d7dd490f5f98ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f61101e-05", "ovs_interfaceid": "3f61101e-05cd-4c60-ad9a-8a272d5e8879", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2200.044486] env[62684]: DEBUG nova.policy [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e957449ae9d24bdaba38b3db704d3d61', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5cb4900a999e467bafdfd1fb407a82f4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2200.174455] env[62684]: DEBUG nova.scheduler.client.report [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2200.221553] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053534, 'name': Rename_Task, 'duration_secs': 0.153831} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2200.221815] env[62684]: DEBUG oslo_vmware.api [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053535, 'name': Rename_Task, 'duration_secs': 0.159414} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2200.222476] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2200.222795] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2200.223074] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-32ec3c74-630f-4a4b-9f0f-1957919b873f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.224640] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-12622fa7-dd5e-42b9-bfae-253cecc5d382 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.229268] env[62684]: DEBUG oslo_vmware.api [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053536, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078101} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2200.230973] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2200.230973] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f877abd-3b49-4cc6-af8f-a67c1bee4eb7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.234243] env[62684]: DEBUG oslo_vmware.api [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2200.234243] env[62684]: value = "task-2053538" [ 2200.234243] env[62684]: _type = "Task" [ 2200.234243] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2200.235712] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2200.235712] env[62684]: value = "task-2053537" [ 2200.235712] env[62684]: _type = "Task" [ 2200.235712] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2200.263583] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] ba12fa9a-10e3-4624-98b5-4ff7365e1940/ba12fa9a-10e3-4624-98b5-4ff7365e1940.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2200.270690] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-990d9078-71fb-4cde-848e-946287c45adf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.286842] env[62684]: DEBUG oslo_vmware.api [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053538, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2200.287172] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053537, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2200.289337] env[62684]: DEBUG nova.compute.manager [req-608e908c-c05e-424d-8b55-b96adcb78d70 req-a6854ca2-d2af-4d37-b2ed-808f5a8fc340 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Received event network-changed-efda8f4d-97b4-44f8-b30b-d26145e98e58 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2200.289613] env[62684]: DEBUG nova.compute.manager [req-608e908c-c05e-424d-8b55-b96adcb78d70 req-a6854ca2-d2af-4d37-b2ed-808f5a8fc340 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Refreshing instance network info cache due to event network-changed-efda8f4d-97b4-44f8-b30b-d26145e98e58. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2200.289747] env[62684]: DEBUG oslo_concurrency.lockutils [req-608e908c-c05e-424d-8b55-b96adcb78d70 req-a6854ca2-d2af-4d37-b2ed-808f5a8fc340 service nova] Acquiring lock "refresh_cache-d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2200.289899] env[62684]: DEBUG oslo_concurrency.lockutils [req-608e908c-c05e-424d-8b55-b96adcb78d70 req-a6854ca2-d2af-4d37-b2ed-808f5a8fc340 service nova] Acquired lock "refresh_cache-d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2200.290081] env[62684]: DEBUG nova.network.neutron [req-608e908c-c05e-424d-8b55-b96adcb78d70 req-a6854ca2-d2af-4d37-b2ed-808f5a8fc340 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Refreshing network info cache for port efda8f4d-97b4-44f8-b30b-d26145e98e58 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2200.296110] env[62684]: DEBUG oslo_vmware.api [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2200.296110] env[62684]: value = "task-2053539" [ 2200.296110] env[62684]: _type = "Task" [ 2200.296110] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2200.305075] env[62684]: DEBUG oslo_vmware.api [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053539, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2200.311856] env[62684]: DEBUG nova.compute.manager [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2200.343227] env[62684]: DEBUG nova.virt.hardware [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2200.343498] env[62684]: DEBUG nova.virt.hardware [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2200.343661] env[62684]: DEBUG nova.virt.hardware [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2200.343852] env[62684]: DEBUG nova.virt.hardware [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2200.344027] env[62684]: DEBUG nova.virt.hardware [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2200.344228] env[62684]: DEBUG nova.virt.hardware [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2200.344613] env[62684]: DEBUG nova.virt.hardware [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2200.344613] env[62684]: DEBUG nova.virt.hardware [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2200.344783] env[62684]: DEBUG nova.virt.hardware [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 
tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2200.344953] env[62684]: DEBUG nova.virt.hardware [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2200.345165] env[62684]: DEBUG nova.virt.hardware [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2200.346045] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2d74e7a-4840-4f40-aca7-739bee01c18a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.355098] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43e412a4-c559-44e2-b8e7-085209b1126d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.468757] env[62684]: DEBUG oslo_concurrency.lockutils [req-a8eb5917-712d-4c96-8b10-41423e60cb40 req-1d6047b8-37a9-4583-8192-b912733a268a service nova] Releasing lock "refresh_cache-f037d6b2-2082-4611-985e-b9a077eb8250" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2200.501819] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Releasing lock "refresh_cache-02480039-f749-402a-92db-df664304a5bf" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2200.502165] env[62684]: DEBUG nova.compute.manager [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Instance network_info: |[{"id": "3f61101e-05cd-4c60-ad9a-8a272d5e8879", "address": "fa:16:3e:f8:fd:88", "network": {"id": "b24dd0c0-a394-4ca6-a79a-94535bc1df6f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2023102141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "607a0aa1049640d882d7dd490f5f98ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f61101e-05", "ovs_interfaceid": "3f61101e-05cd-4c60-ad9a-8a272d5e8879", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2200.502613] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:fd:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3f61101e-05cd-4c60-ad9a-8a272d5e8879', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2200.511059] env[62684]: DEBUG oslo.service.loopingcall [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2200.511313] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02480039-f749-402a-92db-df664304a5bf] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2200.511524] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3f1713a6-6701-4029-b927-08a59325b66e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.532606] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2200.532606] env[62684]: value = "task-2053540" [ 2200.532606] env[62684]: _type = "Task" [ 2200.532606] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2200.540169] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053540, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2200.667751] env[62684]: DEBUG nova.compute.manager [req-2cdc47c2-3056-4a92-9792-0f6fb601d536 req-8bb66afa-2098-4eaf-bef2-51051d053285 service nova] [instance: 02480039-f749-402a-92db-df664304a5bf] Received event network-changed-3f61101e-05cd-4c60-ad9a-8a272d5e8879 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2200.667984] env[62684]: DEBUG nova.compute.manager [req-2cdc47c2-3056-4a92-9792-0f6fb601d536 req-8bb66afa-2098-4eaf-bef2-51051d053285 service nova] [instance: 02480039-f749-402a-92db-df664304a5bf] Refreshing instance network info cache due to event network-changed-3f61101e-05cd-4c60-ad9a-8a272d5e8879. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2200.668235] env[62684]: DEBUG oslo_concurrency.lockutils [req-2cdc47c2-3056-4a92-9792-0f6fb601d536 req-8bb66afa-2098-4eaf-bef2-51051d053285 service nova] Acquiring lock "refresh_cache-02480039-f749-402a-92db-df664304a5bf" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2200.668390] env[62684]: DEBUG oslo_concurrency.lockutils [req-2cdc47c2-3056-4a92-9792-0f6fb601d536 req-8bb66afa-2098-4eaf-bef2-51051d053285 service nova] Acquired lock "refresh_cache-02480039-f749-402a-92db-df664304a5bf" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2200.668554] env[62684]: DEBUG nova.network.neutron [req-2cdc47c2-3056-4a92-9792-0f6fb601d536 req-8bb66afa-2098-4eaf-bef2-51051d053285 service nova] [instance: 02480039-f749-402a-92db-df664304a5bf] Refreshing network info cache for port 3f61101e-05cd-4c60-ad9a-8a272d5e8879 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2200.679714] env[62684]: DEBUG oslo_concurrency.lockutils [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.391s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2200.680273] env[62684]: DEBUG nova.compute.manager [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2200.682654] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6a301a5d-eb03-4600-bdb0-0ca3153101f9 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.254s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2200.684628] env[62684]: DEBUG nova.objects.instance [None req-6a301a5d-eb03-4600-bdb0-0ca3153101f9 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lazy-loading 'resources' on Instance uuid f4fab142-8066-43c1-abaa-a9f66775114c {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2200.749741] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053537, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2200.752820] env[62684]: DEBUG oslo_vmware.api [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053538, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2200.806962] env[62684]: DEBUG oslo_vmware.api [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053539, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2200.916232] env[62684]: DEBUG nova.network.neutron [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Successfully updated port: d95acbd4-4387-4053-9921-da1c96923d5c {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2201.042545] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053540, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2201.049511] env[62684]: DEBUG nova.network.neutron [req-608e908c-c05e-424d-8b55-b96adcb78d70 req-a6854ca2-d2af-4d37-b2ed-808f5a8fc340 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Updated VIF entry in instance network info cache for port efda8f4d-97b4-44f8-b30b-d26145e98e58. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2201.049883] env[62684]: DEBUG nova.network.neutron [req-608e908c-c05e-424d-8b55-b96adcb78d70 req-a6854ca2-d2af-4d37-b2ed-808f5a8fc340 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Updating instance_info_cache with network_info: [{"id": "efda8f4d-97b4-44f8-b30b-d26145e98e58", "address": "fa:16:3e:be:97:33", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefda8f4d-97", "ovs_interfaceid": "efda8f4d-97b4-44f8-b30b-d26145e98e58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2201.185925] env[62684]: DEBUG nova.compute.utils [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2201.191426] env[62684]: DEBUG nova.compute.manager [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 
tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2201.191616] env[62684]: DEBUG nova.network.neutron [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2201.237870] env[62684]: DEBUG nova.policy [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0f1bf470b70a4e8baa9cad549c1ca93c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3c0a5eb59cda47c2b3f4c6d6a4e58bca', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2201.258497] env[62684]: DEBUG oslo_vmware.api [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053538, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2201.258800] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053537, 'name': PowerOnVM_Task} progress is 81%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2201.310826] env[62684]: DEBUG oslo_vmware.api [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053539, 'name': ReconfigVM_Task, 'duration_secs': 0.993755} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2201.311180] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Reconfigured VM instance instance-00000062 to attach disk [datastore1] ba12fa9a-10e3-4624-98b5-4ff7365e1940/ba12fa9a-10e3-4624-98b5-4ff7365e1940.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2201.311889] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-76ccd7f5-2954-4735-abff-e7fa95961147 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2201.321988] env[62684]: DEBUG oslo_vmware.api [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2201.321988] env[62684]: value = "task-2053541" [ 2201.321988] env[62684]: _type = "Task" [ 2201.321988] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2201.335051] env[62684]: DEBUG oslo_vmware.api [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053541, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2201.415388] env[62684]: DEBUG nova.network.neutron [req-2cdc47c2-3056-4a92-9792-0f6fb601d536 req-8bb66afa-2098-4eaf-bef2-51051d053285 service nova] [instance: 02480039-f749-402a-92db-df664304a5bf] Updated VIF entry in instance network info cache for port 3f61101e-05cd-4c60-ad9a-8a272d5e8879. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2201.415806] env[62684]: DEBUG nova.network.neutron [req-2cdc47c2-3056-4a92-9792-0f6fb601d536 req-8bb66afa-2098-4eaf-bef2-51051d053285 service nova] [instance: 02480039-f749-402a-92db-df664304a5bf] Updating instance_info_cache with network_info: [{"id": "3f61101e-05cd-4c60-ad9a-8a272d5e8879", "address": "fa:16:3e:f8:fd:88", "network": {"id": "b24dd0c0-a394-4ca6-a79a-94535bc1df6f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2023102141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "607a0aa1049640d882d7dd490f5f98ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f61101e-05", "ovs_interfaceid": "3f61101e-05cd-4c60-ad9a-8a272d5e8879", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2201.418915] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Acquiring lock "refresh_cache-3ff55331-6d5c-4558-b932-e266670f2ac9" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2201.419033] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Acquired lock "refresh_cache-3ff55331-6d5c-4558-b932-e266670f2ac9" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2201.419148] env[62684]: DEBUG nova.network.neutron [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2201.500914] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c2a76d-cf2e-4830-b50b-6fad25ae11bb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2201.510750] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb1878f8-3096-432a-bb36-666a3ff390a4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2201.548229] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-558fe0a6-026f-4547-8d62-2e1bc850efd7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2201.551976] 
env[62684]: DEBUG oslo_concurrency.lockutils [req-608e908c-c05e-424d-8b55-b96adcb78d70 req-a6854ca2-d2af-4d37-b2ed-808f5a8fc340 service nova] Releasing lock "refresh_cache-d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2201.559659] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18064e28-0f5e-4f1c-a370-de411be5f72e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2201.563786] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053540, 'name': CreateVM_Task, 'duration_secs': 0.805932} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2201.564035] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02480039-f749-402a-92db-df664304a5bf] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2201.565554] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2201.565861] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2201.566303] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2201.575459] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92c7e917-5dd1-46cd-8f34-85bc5525237a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2201.578302] env[62684]: DEBUG nova.compute.provider_tree [None req-6a301a5d-eb03-4600-bdb0-0ca3153101f9 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2201.583457] env[62684]: DEBUG oslo_vmware.api [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2201.583457] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52dae987-f477-b9a3-faee-a0e8487bbd67" [ 2201.583457] env[62684]: _type = "Task" [ 2201.583457] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2201.592405] env[62684]: DEBUG oslo_vmware.api [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52dae987-f477-b9a3-faee-a0e8487bbd67, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2201.667892] env[62684]: DEBUG nova.network.neutron [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Successfully created port: e9e4e88f-b0c7-406a-b45c-3fd6cfcb854b {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2201.695768] env[62684]: DEBUG nova.compute.manager [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2201.746552] env[62684]: DEBUG oslo_vmware.api [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053538, 'name': PowerOnVM_Task, 'duration_secs': 1.108828} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2201.750065] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2201.750065] env[62684]: INFO nova.compute.manager [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Took 11.47 seconds to spawn the instance on the hypervisor. [ 2201.750213] env[62684]: DEBUG nova.compute.manager [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2201.750972] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6b308cb-4a25-4534-9bb9-c615619f0c0c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2201.760386] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053537, 'name': PowerOnVM_Task, 'duration_secs': 1.154315} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2201.761855] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2201.762081] env[62684]: DEBUG nova.compute.manager [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2201.768018] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc1edee5-e933-49bb-8447-3fe6fc49b5da {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2201.831631] env[62684]: DEBUG oslo_vmware.api [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053541, 'name': Rename_Task, 'duration_secs': 0.179075} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2201.831971] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2201.832245] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b49eea28-434a-48cf-a270-118967120164 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2201.839084] env[62684]: DEBUG oslo_vmware.api [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2201.839084] env[62684]: value = "task-2053542" [ 2201.839084] env[62684]: _type = "Task" [ 2201.839084] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2201.846403] env[62684]: DEBUG oslo_vmware.api [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053542, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2201.918844] env[62684]: DEBUG oslo_concurrency.lockutils [req-2cdc47c2-3056-4a92-9792-0f6fb601d536 req-8bb66afa-2098-4eaf-bef2-51051d053285 service nova] Releasing lock "refresh_cache-02480039-f749-402a-92db-df664304a5bf" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2201.961655] env[62684]: DEBUG nova.network.neutron [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2202.082313] env[62684]: DEBUG nova.scheduler.client.report [None req-6a301a5d-eb03-4600-bdb0-0ca3153101f9 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2202.098553] env[62684]: DEBUG oslo_vmware.api [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52dae987-f477-b9a3-faee-a0e8487bbd67, 'name': SearchDatastore_Task, 'duration_secs': 0.01122} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2202.098553] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2202.098788] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2202.099115] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2202.099296] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2202.099482] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2202.099755] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-30c6f57d-6d32-4ade-99cd-38d724898e38 {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.108844] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2202.109112] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2202.110329] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02523577-7a66-4091-9a26-e79cef260d94 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.116161] env[62684]: DEBUG oslo_vmware.api [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2202.116161] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524fbd7d-55df-6ced-9c20-0ec9eb6dcfa7" [ 2202.116161] env[62684]: _type = "Task" [ 2202.116161] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2202.124211] env[62684]: DEBUG oslo_vmware.api [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524fbd7d-55df-6ced-9c20-0ec9eb6dcfa7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2202.188901] env[62684]: DEBUG nova.network.neutron [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Updating instance_info_cache with network_info: [{"id": "d95acbd4-4387-4053-9921-da1c96923d5c", "address": "fa:16:3e:d0:b1:bb", "network": {"id": "a4e44087-50f0-426e-8ce5-676ae0edb8ce", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1375053640-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a21c5c0324ff44789b5722eaea3baf1d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f1b507ed-cd2d-4c09-9d96-c47bde6a7774", "external-id": "nsx-vlan-transportzone-980", "segmentation_id": 980, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd95acbd4-43", "ovs_interfaceid": "d95acbd4-4387-4053-9921-da1c96923d5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2202.235215] env[62684]: DEBUG nova.network.neutron [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Successfully updated port: e60b8515-c469-46d3-945b-bc843ccffc44 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2202.275267] env[62684]: INFO nova.compute.manager [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] bringing vm to original state: 'stopped' [ 2202.277923] env[62684]: INFO nova.compute.manager [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Took 24.14 seconds to build instance. 
[ 2202.300642] env[62684]: DEBUG nova.compute.manager [req-1fb1c3b0-f97c-4817-9610-53e54e3dba7a req-237e0570-8472-4365-b0ef-c26453429762 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Received event network-vif-plugged-e60b8515-c469-46d3-945b-bc843ccffc44 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2202.300932] env[62684]: DEBUG oslo_concurrency.lockutils [req-1fb1c3b0-f97c-4817-9610-53e54e3dba7a req-237e0570-8472-4365-b0ef-c26453429762 service nova] Acquiring lock "d7f09d0e-f7b6-415e-8d82-47eba1153aa1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2202.301139] env[62684]: DEBUG oslo_concurrency.lockutils [req-1fb1c3b0-f97c-4817-9610-53e54e3dba7a req-237e0570-8472-4365-b0ef-c26453429762 service nova] Lock "d7f09d0e-f7b6-415e-8d82-47eba1153aa1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2202.301338] env[62684]: DEBUG oslo_concurrency.lockutils [req-1fb1c3b0-f97c-4817-9610-53e54e3dba7a req-237e0570-8472-4365-b0ef-c26453429762 service nova] Lock "d7f09d0e-f7b6-415e-8d82-47eba1153aa1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2202.301513] env[62684]: DEBUG nova.compute.manager [req-1fb1c3b0-f97c-4817-9610-53e54e3dba7a req-237e0570-8472-4365-b0ef-c26453429762 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] No waiting events found dispatching network-vif-plugged-e60b8515-c469-46d3-945b-bc843ccffc44 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2202.301687] env[62684]: WARNING nova.compute.manager [req-1fb1c3b0-f97c-4817-9610-53e54e3dba7a req-237e0570-8472-4365-b0ef-c26453429762 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Received unexpected event network-vif-plugged-e60b8515-c469-46d3-945b-bc843ccffc44 for instance with vm_state active and task_state None. [ 2202.301853] env[62684]: DEBUG nova.compute.manager [req-1fb1c3b0-f97c-4817-9610-53e54e3dba7a req-237e0570-8472-4365-b0ef-c26453429762 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Received event network-changed-e60b8515-c469-46d3-945b-bc843ccffc44 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2202.302018] env[62684]: DEBUG nova.compute.manager [req-1fb1c3b0-f97c-4817-9610-53e54e3dba7a req-237e0570-8472-4365-b0ef-c26453429762 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Refreshing instance network info cache due to event network-changed-e60b8515-c469-46d3-945b-bc843ccffc44. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2202.302610] env[62684]: DEBUG oslo_concurrency.lockutils [req-1fb1c3b0-f97c-4817-9610-53e54e3dba7a req-237e0570-8472-4365-b0ef-c26453429762 service nova] Acquiring lock "refresh_cache-d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2202.302610] env[62684]: DEBUG oslo_concurrency.lockutils [req-1fb1c3b0-f97c-4817-9610-53e54e3dba7a req-237e0570-8472-4365-b0ef-c26453429762 service nova] Acquired lock "refresh_cache-d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2202.302610] env[62684]: DEBUG nova.network.neutron [req-1fb1c3b0-f97c-4817-9610-53e54e3dba7a req-237e0570-8472-4365-b0ef-c26453429762 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Refreshing network info cache for port e60b8515-c469-46d3-945b-bc843ccffc44 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2202.349029] env[62684]: DEBUG oslo_vmware.api [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053542, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2202.593241] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6a301a5d-eb03-4600-bdb0-0ca3153101f9 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.910s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2202.596025] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 6.585s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2202.616848] env[62684]: INFO nova.scheduler.client.report [None req-6a301a5d-eb03-4600-bdb0-0ca3153101f9 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Deleted allocations for instance f4fab142-8066-43c1-abaa-a9f66775114c [ 2202.630027] env[62684]: DEBUG oslo_vmware.api [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524fbd7d-55df-6ced-9c20-0ec9eb6dcfa7, 'name': SearchDatastore_Task, 'duration_secs': 0.008628} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2202.630227] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36d8bf4f-fa97-40cf-b3ab-9410394a6931 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.635570] env[62684]: DEBUG oslo_vmware.api [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2202.635570] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52cdb85f-5d79-3f63-7a87-258b1dbee842" [ 2202.635570] env[62684]: _type = "Task" [ 2202.635570] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2202.643209] env[62684]: DEBUG oslo_vmware.api [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52cdb85f-5d79-3f63-7a87-258b1dbee842, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2202.691884] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Releasing lock "refresh_cache-3ff55331-6d5c-4558-b932-e266670f2ac9" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2202.692267] env[62684]: DEBUG nova.compute.manager [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Instance network_info: |[{"id": "d95acbd4-4387-4053-9921-da1c96923d5c", "address": "fa:16:3e:d0:b1:bb", "network": {"id": "a4e44087-50f0-426e-8ce5-676ae0edb8ce", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1375053640-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a21c5c0324ff44789b5722eaea3baf1d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f1b507ed-cd2d-4c09-9d96-c47bde6a7774", "external-id": "nsx-vlan-transportzone-980", "segmentation_id": 980, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd95acbd4-43", "ovs_interfaceid": "d95acbd4-4387-4053-9921-da1c96923d5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2202.692691] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:d0:b1:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f1b507ed-cd2d-4c09-9d96-c47bde6a7774', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd95acbd4-4387-4053-9921-da1c96923d5c', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2202.700163] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Creating folder: Project (a21c5c0324ff44789b5722eaea3baf1d). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2202.702710] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a6ac8e18-3ae6-4248-88f5-764201326930 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.705621] env[62684]: DEBUG nova.compute.manager [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2202.712779] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Created folder: Project (a21c5c0324ff44789b5722eaea3baf1d) in parent group-v421118. [ 2202.712971] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Creating folder: Instances. Parent ref: group-v421381. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2202.713306] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-be50c028-0e7b-450a-b653-2266e1be0ae8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.722056] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Created folder: Instances in parent group-v421381. [ 2202.722294] env[62684]: DEBUG oslo.service.loopingcall [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
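Editor's note: the network_info blob and the "Instance VIF info" entry above are the same Neutron port in two shapes; the vmwareapi driver flattens each VIF into the small dict it later hands to build_virtual_machine. Below is a minimal sketch of that flattening, assuming NSX-backed ports always map to an OpaqueNetwork reference; the helper name and field selection are illustrative, not Nova's actual vif.py code.

    # Illustrative only: turn one network_info entry (as logged above) into the
    # flat VIF-info dict the vmwareapi driver logs before building the VM.
    # The helper name and field choices are assumptions, not Nova's real code.
    def to_vif_info(vif, vif_model='vmxnet3'):
        details = vif.get('details', {})
        return {
            'network_name': vif['network']['bridge'],        # e.g. 'br-int'
            'mac_address': vif['address'],                    # e.g. 'fa:16:3e:d0:b1:bb'
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': details['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],                            # Neutron port UUID
            'vif_model': vif_model,
        }

Feeding it the d95acbd4-4387-4053-9921-da1c96923d5c port dict from the log would reproduce the network_name, mac_address and network_ref values shown in the VIF info entry above.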
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2202.722489] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2202.722698] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6d0837d9-5d77-4233-a095-ec72a2e27773 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.739956] env[62684]: DEBUG oslo_concurrency.lockutils [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "refresh_cache-d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2202.746031] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2202.746031] env[62684]: value = "task-2053545" [ 2202.746031] env[62684]: _type = "Task" [ 2202.746031] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2202.747614] env[62684]: DEBUG nova.virt.hardware [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2202.747840] env[62684]: DEBUG nova.virt.hardware [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2202.748013] env[62684]: DEBUG nova.virt.hardware [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2202.748206] env[62684]: DEBUG nova.virt.hardware [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2202.748356] env[62684]: DEBUG nova.virt.hardware [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 
tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2202.748509] env[62684]: DEBUG nova.virt.hardware [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2202.748720] env[62684]: DEBUG nova.virt.hardware [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2202.748877] env[62684]: DEBUG nova.virt.hardware [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2202.749087] env[62684]: DEBUG nova.virt.hardware [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2202.749271] env[62684]: DEBUG nova.virt.hardware [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2202.749452] env[62684]: DEBUG nova.virt.hardware [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2202.750358] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd0625b5-d857-4446-80a1-45173f28bbe9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.764936] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73fd8c7c-c0b1-4fac-a8a5-7bff4066eb50 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.768995] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053545, 'name': CreateVM_Task} progress is 6%. 
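Editor's note: the hardware.py walk above (limits 65536:65536:65536, 1 vCPU, exactly one possible topology) is essentially an enumeration of socket/core/thread factorizations. A toy version of that search is sketched below; it is not Nova's _get_possible_cpu_topologies, which additionally honours flavor and image preferences when sorting the results.

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Toy enumeration: every (sockets, cores, threads) triple whose product
        equals the vCPU count, within the given limits."""
        found = []
        for sockets in range(1, min(max_sockets, vcpus) + 1):
            for cores in range(1, min(max_cores, vcpus) + 1):
                for threads in range(1, min(max_threads, vcpus) + 1):
                    if sockets * cores * threads == vcpus:
                        found.append((sockets, cores, threads))
        return found

    print(possible_topologies(1))  # [(1, 1, 1)] -- the single topology the log reports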
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2202.783055] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0ce74bec-56b8-4323-bd9e-2beb201755b5 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "264c6900-dbef-455e-95cc-1df73c735cc8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.648s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2202.803785] env[62684]: DEBUG nova.compute.manager [req-cc635dab-8cf3-4507-882e-e81c660e3e49 req-d288a29a-a615-408a-b1c4-a23147be70da service nova] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Received event network-vif-plugged-d95acbd4-4387-4053-9921-da1c96923d5c {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2202.804027] env[62684]: DEBUG oslo_concurrency.lockutils [req-cc635dab-8cf3-4507-882e-e81c660e3e49 req-d288a29a-a615-408a-b1c4-a23147be70da service nova] Acquiring lock "3ff55331-6d5c-4558-b932-e266670f2ac9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2202.804254] env[62684]: DEBUG oslo_concurrency.lockutils [req-cc635dab-8cf3-4507-882e-e81c660e3e49 req-d288a29a-a615-408a-b1c4-a23147be70da service nova] Lock "3ff55331-6d5c-4558-b932-e266670f2ac9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2202.804429] env[62684]: DEBUG oslo_concurrency.lockutils [req-cc635dab-8cf3-4507-882e-e81c660e3e49 req-d288a29a-a615-408a-b1c4-a23147be70da service nova] Lock "3ff55331-6d5c-4558-b932-e266670f2ac9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2202.804601] env[62684]: DEBUG nova.compute.manager [req-cc635dab-8cf3-4507-882e-e81c660e3e49 req-d288a29a-a615-408a-b1c4-a23147be70da service nova] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] No waiting events found dispatching network-vif-plugged-d95acbd4-4387-4053-9921-da1c96923d5c {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2202.804772] env[62684]: WARNING nova.compute.manager [req-cc635dab-8cf3-4507-882e-e81c660e3e49 req-d288a29a-a615-408a-b1c4-a23147be70da service nova] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Received unexpected event network-vif-plugged-d95acbd4-4387-4053-9921-da1c96923d5c for instance with vm_state building and task_state spawning. [ 2202.804937] env[62684]: DEBUG nova.compute.manager [req-cc635dab-8cf3-4507-882e-e81c660e3e49 req-d288a29a-a615-408a-b1c4-a23147be70da service nova] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Received event network-changed-d95acbd4-4387-4053-9921-da1c96923d5c {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2202.805110] env[62684]: DEBUG nova.compute.manager [req-cc635dab-8cf3-4507-882e-e81c660e3e49 req-d288a29a-a615-408a-b1c4-a23147be70da service nova] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Refreshing instance network info cache due to event network-changed-d95acbd4-4387-4053-9921-da1c96923d5c. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2202.805301] env[62684]: DEBUG oslo_concurrency.lockutils [req-cc635dab-8cf3-4507-882e-e81c660e3e49 req-d288a29a-a615-408a-b1c4-a23147be70da service nova] Acquiring lock "refresh_cache-3ff55331-6d5c-4558-b932-e266670f2ac9" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2202.805495] env[62684]: DEBUG oslo_concurrency.lockutils [req-cc635dab-8cf3-4507-882e-e81c660e3e49 req-d288a29a-a615-408a-b1c4-a23147be70da service nova] Acquired lock "refresh_cache-3ff55331-6d5c-4558-b932-e266670f2ac9" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2202.805617] env[62684]: DEBUG nova.network.neutron [req-cc635dab-8cf3-4507-882e-e81c660e3e49 req-d288a29a-a615-408a-b1c4-a23147be70da service nova] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Refreshing network info cache for port d95acbd4-4387-4053-9921-da1c96923d5c {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2202.852297] env[62684]: DEBUG oslo_vmware.api [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053542, 'name': PowerOnVM_Task, 'duration_secs': 0.635211} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2202.852588] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2202.852779] env[62684]: INFO nova.compute.manager [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Took 10.19 seconds to spawn the instance on the hypervisor. [ 2202.853258] env[62684]: DEBUG nova.compute.manager [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2202.853784] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7202ae55-acd0-46f0-8212-706060071255 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.028697] env[62684]: DEBUG nova.network.neutron [req-1fb1c3b0-f97c-4817-9610-53e54e3dba7a req-237e0570-8472-4365-b0ef-c26453429762 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Added VIF to instance network info cache for port e60b8515-c469-46d3-945b-bc843ccffc44. 
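Editor's note: the Acquiring/Acquired/Releasing lines for "refresh_cache-<uuid>" above come from oslo.concurrency's lock() context manager serialising updates to a single instance's network info cache. A minimal sketch of the same pattern follows; the lock name scheme is taken from the log, while the refresh callable is a hypothetical stand-in for Nova's cache update.

    from oslo_concurrency import lockutils

    def refresh_nw_cache_locked(instance_uuid, refresh_fn):
        """Serialise per-instance network cache refreshes, as the log lines above do."""
        # Same in-process lock name scheme as the "refresh_cache-<uuid>" locks in the log.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            refresh_fn(instance_uuid)  # hypothetical helper doing the actual cache update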
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3546}} [ 2203.029280] env[62684]: DEBUG nova.network.neutron [req-1fb1c3b0-f97c-4817-9610-53e54e3dba7a req-237e0570-8472-4365-b0ef-c26453429762 service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Updating instance_info_cache with network_info: [{"id": "efda8f4d-97b4-44f8-b30b-d26145e98e58", "address": "fa:16:3e:be:97:33", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefda8f4d-97", "ovs_interfaceid": "efda8f4d-97b4-44f8-b30b-d26145e98e58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e60b8515-c469-46d3-945b-bc843ccffc44", "address": "fa:16:3e:f9:ed:d2", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape60b8515-c4", "ovs_interfaceid": "e60b8515-c469-46d3-945b-bc843ccffc44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2203.127239] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6a301a5d-eb03-4600-bdb0-0ca3153101f9 tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "f4fab142-8066-43c1-abaa-a9f66775114c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.797s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2203.147050] env[62684]: DEBUG oslo_vmware.api [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': 
session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52cdb85f-5d79-3f63-7a87-258b1dbee842, 'name': SearchDatastore_Task, 'duration_secs': 0.014442} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2203.147245] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2203.147582] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 02480039-f749-402a-92db-df664304a5bf/02480039-f749-402a-92db-df664304a5bf.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2203.147773] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7df7ef56-4351-49d9-aa10-91a58b8a214f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.154691] env[62684]: DEBUG oslo_vmware.api [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2203.154691] env[62684]: value = "task-2053546" [ 2203.154691] env[62684]: _type = "Task" [ 2203.154691] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2203.162801] env[62684]: DEBUG oslo_vmware.api [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053546, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2203.261936] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053545, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2203.287353] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "99a9653c-7221-4495-be5f-5441dc8da0f4" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2203.287353] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "99a9653c-7221-4495-be5f-5441dc8da0f4" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2203.287353] env[62684]: DEBUG nova.compute.manager [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2203.287724] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a39024ce-86c2-43a1-9d00-9433f53dca27 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.297592] env[62684]: DEBUG nova.compute.manager [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62684) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 2203.298302] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2203.298610] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-88c85010-1a3a-44e2-8978-f20abafe8bab {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.305574] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2203.305574] env[62684]: value = "task-2053547" [ 2203.305574] env[62684]: _type = "Task" [ 2203.305574] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2203.319421] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053547, 'name': PowerOffVM_Task} progress is 0%. 
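Editor's note: each "Waiting for the task" / "progress is N%" pair above is oslo.vmware driving an asynchronous vCenter task to completion. A hedged sketch of that calling pattern for the PowerOffVM_Task case is below; constructing the VMwareAPISession and looking up the VM managed-object reference are environment-specific and omitted.

    def power_off(session, vm_ref):
        """Sketch of the wait pattern behind the PowerOffVM_Task lines above.

        `session` is an oslo_vmware.api.VMwareAPISession and `vm_ref` a
        VirtualMachine managed-object reference; both come from the driver.
        """
        # Kicks off the asynchronous vCenter task
        # ("Invoking VirtualMachine.PowerOffVM_Task" in the log).
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # Polls until done (the "_poll_task ... progress is N%" lines) and
        # raises if vCenter reports the task as failed.
        session.wait_for_task(task)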
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2203.375800] env[62684]: INFO nova.compute.manager [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Took 23.44 seconds to build instance. [ 2203.477081] env[62684]: DEBUG nova.network.neutron [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Successfully updated port: e9e4e88f-b0c7-406a-b45c-3fd6cfcb854b {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2203.533051] env[62684]: DEBUG oslo_concurrency.lockutils [req-1fb1c3b0-f97c-4817-9610-53e54e3dba7a req-237e0570-8472-4365-b0ef-c26453429762 service nova] Releasing lock "refresh_cache-d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2203.533773] env[62684]: DEBUG oslo_concurrency.lockutils [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "refresh_cache-d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2203.534064] env[62684]: DEBUG nova.network.neutron [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2203.594548] env[62684]: DEBUG nova.network.neutron [req-cc635dab-8cf3-4507-882e-e81c660e3e49 req-d288a29a-a615-408a-b1c4-a23147be70da service nova] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Updated VIF entry in instance network info cache for port d95acbd4-4387-4053-9921-da1c96923d5c. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2203.595466] env[62684]: DEBUG nova.network.neutron [req-cc635dab-8cf3-4507-882e-e81c660e3e49 req-d288a29a-a615-408a-b1c4-a23147be70da service nova] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Updating instance_info_cache with network_info: [{"id": "d95acbd4-4387-4053-9921-da1c96923d5c", "address": "fa:16:3e:d0:b1:bb", "network": {"id": "a4e44087-50f0-426e-8ce5-676ae0edb8ce", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1375053640-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a21c5c0324ff44789b5722eaea3baf1d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f1b507ed-cd2d-4c09-9d96-c47bde6a7774", "external-id": "nsx-vlan-transportzone-980", "segmentation_id": 980, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd95acbd4-43", "ovs_interfaceid": "d95acbd4-4387-4053-9921-da1c96923d5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2203.613578] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Skipping migration as instance is neither resizing nor live-migrating. {{(pid=62684) _update_usage_from_migrations /opt/stack/nova/nova/compute/resource_tracker.py:1563}} [ 2203.633594] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2203.633755] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance b1f70e39-bf37-4fb8-b95b-653b59bec265 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2203.633878] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance ca3d1a73-6f3b-4278-8fe7-03b66f407ba6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2203.633999] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 57537508-06e7-43a4-95c5-c4399b8bf93f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2203.634155] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 0156d807-1ab4-482f-91d1-172bf32bf23c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2203.634274] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 2baabe7a-ed33-4cef-9acc-a7b804610b0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2203.634429] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance f037d6b2-2082-4611-985e-b9a077eb8250 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2203.634483] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance d7f09d0e-f7b6-415e-8d82-47eba1153aa1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2203.634595] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 584845d2-d146-42bf-8ef5-58532fe24f65 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2203.634700] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 99a9653c-7221-4495-be5f-5441dc8da0f4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2203.634807] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance daf1486b-d5c2-4341-8a27-36eeeb08cd26 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2203.634913] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance ba12fa9a-10e3-4624-98b5-4ff7365e1940 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2203.635037] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 02480039-f749-402a-92db-df664304a5bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2203.635163] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 3ff55331-6d5c-4558-b932-e266670f2ac9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2203.635271] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 58e67d8e-900e-4d22-a4fd-fe493758d4f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2203.635534] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Migration 3ac08fc8-b87f-4785-ac54-acacbaf5dfc4 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 2203.635534] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 264c6900-dbef-455e-95cc-1df73c735cc8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2203.635704] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2203.635872] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3584MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2203.665430] env[62684]: DEBUG oslo_vmware.api [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053546, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503798} completed successfully. 
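Editor's note: the "Final resource view" above is internally consistent with 512 MB of reserved host memory plus sixteen 192 MB instances (used_ram = 512 + 16 x 192 = 3584 MB), sixteen 1 GB root disks (used_disk = 16 GB) and one vCPU each (used_vcpus = 16). A two-line check, with the values copied from the log:

    reserved_mb, num_instances, per_instance_mb = 512, 16, 192
    assert reserved_mb + num_instances * per_instance_mb == 3584  # used_ram in the log
    assert num_instances * 1 == 16                                # used_disk (GB) and used_vcpus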
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2203.665584] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 02480039-f749-402a-92db-df664304a5bf/02480039-f749-402a-92db-df664304a5bf.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2203.666037] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2203.668486] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-87a0352e-c4c4-4380-a15b-496031f3634b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.675424] env[62684]: DEBUG oslo_vmware.api [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2203.675424] env[62684]: value = "task-2053548" [ 2203.675424] env[62684]: _type = "Task" [ 2203.675424] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2203.685226] env[62684]: DEBUG oslo_vmware.api [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053548, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2203.759984] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053545, 'name': CreateVM_Task, 'duration_secs': 0.684081} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2203.760952] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2203.761055] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2203.761241] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2203.761558] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2203.764116] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3204981-6c6e-44ff-b450-76f35ac19d49 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.769015] env[62684]: DEBUG oslo_vmware.api [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Waiting for the task: (returnval){ [ 2203.769015] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521727b9-63e8-586c-3de9-a631f27288b1" [ 2203.769015] env[62684]: _type = "Task" [ 2203.769015] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2203.777021] env[62684]: DEBUG oslo_vmware.api [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521727b9-63e8-586c-3de9-a631f27288b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2203.815559] env[62684]: DEBUG oslo_vmware.api [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053547, 'name': PowerOffVM_Task, 'duration_secs': 0.379375} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2203.815862] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2203.816029] env[62684]: DEBUG nova.compute.manager [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2203.816818] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-797c5b4d-1f10-40de-8612-b08e9c38269d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.879678] env[62684]: DEBUG oslo_concurrency.lockutils [None req-16d95d85-99b1-4796-989e-5d0a69a84e06 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "ba12fa9a-10e3-4624-98b5-4ff7365e1940" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.959s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2203.893018] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a92b89-7e0b-46d8-b409-7d92a492126b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.899463] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4af1ef9-7f5f-4840-88d2-438e6a7bc2c3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.934538] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e00abfb3-36cc-4e53-aeb0-2965d63d65b5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.939487] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab826c75-d0bf-4ed8-ac69-fb7b8a949784 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.953561] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2203.980857] env[62684]: DEBUG oslo_concurrency.lockutils [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Acquiring lock 
"refresh_cache-58e67d8e-900e-4d22-a4fd-fe493758d4f2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2203.981725] env[62684]: DEBUG oslo_concurrency.lockutils [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Acquired lock "refresh_cache-58e67d8e-900e-4d22-a4fd-fe493758d4f2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2203.982046] env[62684]: DEBUG nova.network.neutron [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2204.079141] env[62684]: WARNING nova.network.neutron [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] bbb78a3c-6804-4aae-9107-4ae6699c305d already exists in list: networks containing: ['bbb78a3c-6804-4aae-9107-4ae6699c305d']. ignoring it [ 2204.079141] env[62684]: WARNING nova.network.neutron [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] bbb78a3c-6804-4aae-9107-4ae6699c305d already exists in list: networks containing: ['bbb78a3c-6804-4aae-9107-4ae6699c305d']. ignoring it [ 2204.079141] env[62684]: WARNING nova.network.neutron [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] e60b8515-c469-46d3-945b-bc843ccffc44 already exists in list: port_ids containing: ['e60b8515-c469-46d3-945b-bc843ccffc44']. ignoring it [ 2204.096821] env[62684]: DEBUG oslo_concurrency.lockutils [req-cc635dab-8cf3-4507-882e-e81c660e3e49 req-d288a29a-a615-408a-b1c4-a23147be70da service nova] Releasing lock "refresh_cache-3ff55331-6d5c-4558-b932-e266670f2ac9" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2204.097729] env[62684]: DEBUG nova.compute.manager [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Stashing vm_state: active {{(pid=62684) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 2204.188126] env[62684]: DEBUG oslo_vmware.api [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053548, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070368} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2204.188527] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2204.189914] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-010f089e-f22b-4c93-92c6-22575efde7c4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.215612] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] 02480039-f749-402a-92db-df664304a5bf/02480039-f749-402a-92db-df664304a5bf.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2204.216446] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d71a14fc-c427-4c7c-bc2d-12a4a0fc93fe {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.240792] env[62684]: DEBUG oslo_vmware.api [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2204.240792] env[62684]: value = "task-2053549" [ 2204.240792] env[62684]: _type = "Task" [ 2204.240792] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2204.249133] env[62684]: DEBUG oslo_vmware.api [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053549, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2204.281335] env[62684]: DEBUG oslo_vmware.api [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521727b9-63e8-586c-3de9-a631f27288b1, 'name': SearchDatastore_Task, 'duration_secs': 0.008576} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2204.281766] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2204.281917] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2204.282177] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2204.282328] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2204.282506] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2204.282776] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ca98ffe4-a831-4d59-bf4c-33b96e1b51e5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.291673] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2204.291936] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2204.294916] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e87ef667-9a5c-4f5d-aaeb-ca98248a3644 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.300490] env[62684]: DEBUG oslo_vmware.api [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Waiting for the task: (returnval){ [ 2204.300490] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d4a207-3b77-8fc1-28b3-b7f0478c6382" [ 2204.300490] env[62684]: _type = "Task" [ 2204.300490] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2204.309409] env[62684]: DEBUG oslo_vmware.api [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d4a207-3b77-8fc1-28b3-b7f0478c6382, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2204.332998] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "99a9653c-7221-4495-be5f-5441dc8da0f4" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.046s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2204.423113] env[62684]: DEBUG nova.network.neutron [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Updating instance_info_cache with network_info: [{"id": "efda8f4d-97b4-44f8-b30b-d26145e98e58", "address": "fa:16:3e:be:97:33", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefda8f4d-97", "ovs_interfaceid": "efda8f4d-97b4-44f8-b30b-d26145e98e58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e60b8515-c469-46d3-945b-bc843ccffc44", "address": "fa:16:3e:f9:ed:d2", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": 
"tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape60b8515-c4", "ovs_interfaceid": "e60b8515-c469-46d3-945b-bc843ccffc44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2204.478818] env[62684]: ERROR nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [req-4606713a-d84a-4a36-b92f-06ec211e67e5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4606713a-d84a-4a36-b92f-06ec211e67e5"}]} [ 2204.499282] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2204.518340] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2204.518528] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2204.525789] env[62684]: DEBUG nova.network.neutron [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2204.532669] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2204.552044] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2204.616874] env[62684]: DEBUG oslo_concurrency.lockutils [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2204.675059] env[62684]: DEBUG nova.network.neutron [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Updating instance_info_cache with network_info: [{"id": "e9e4e88f-b0c7-406a-b45c-3fd6cfcb854b", "address": "fa:16:3e:4a:e8:95", "network": {"id": "d9fd767c-b907-4fb5-981d-fb0329dd5dfe", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1951802427-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c0a5eb59cda47c2b3f4c6d6a4e58bca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9e4e88f-b0", "ovs_interfaceid": "e9e4e88f-b0c7-406a-b45c-3fd6cfcb854b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2204.755575] env[62684]: DEBUG oslo_vmware.api [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053549, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2204.784199] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce5ad3a2-0c97-4301-ba62-16b9addf0423 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.790353] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b7a8fa3-7b2f-4c9d-9b68-c04ef174c406 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.823376] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f902edc2-bd0e-406d-a820-a9a4bd777a15 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.831700] env[62684]: DEBUG oslo_vmware.api [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d4a207-3b77-8fc1-28b3-b7f0478c6382, 'name': SearchDatastore_Task, 'duration_secs': 0.030199} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2204.834159] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-765c53de-913a-42da-861e-b8e0b15d43cb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.837081] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-193c2e78-232a-49bf-8d0e-577a49970164 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.854920] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2204.856308] env[62684]: DEBUG oslo_vmware.api [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Waiting for the task: (returnval){ [ 2204.856308] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52397af2-4223-6775-bd2a-5edbb18c397d" [ 2204.856308] env[62684]: _type = "Task" [ 2204.856308] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2204.856879] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2204.869342] env[62684]: DEBUG oslo_vmware.api [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52397af2-4223-6775-bd2a-5edbb18c397d, 'name': SearchDatastore_Task, 'duration_secs': 0.009289} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2204.869602] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2204.870178] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 3ff55331-6d5c-4558-b932-e266670f2ac9/3ff55331-6d5c-4558-b932-e266670f2ac9.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2204.870410] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d62b6daa-113b-4b95-969f-d2b5d2568424 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.877452] env[62684]: DEBUG oslo_vmware.api [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Waiting for the task: (returnval){ [ 2204.877452] env[62684]: value = "task-2053550" [ 2204.877452] env[62684]: _type = "Task" [ 2204.877452] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2204.886691] env[62684]: DEBUG oslo_vmware.api [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Task: {'id': task-2053550, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2204.888513] env[62684]: DEBUG nova.compute.manager [req-d5db269d-cbc6-4cd7-a0f8-9aae3a8897ba req-828af565-5fa8-42b1-88a3-a0c39c1247b3 service nova] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Received event network-vif-plugged-e9e4e88f-b0c7-406a-b45c-3fd6cfcb854b {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2204.888787] env[62684]: DEBUG oslo_concurrency.lockutils [req-d5db269d-cbc6-4cd7-a0f8-9aae3a8897ba req-828af565-5fa8-42b1-88a3-a0c39c1247b3 service nova] Acquiring lock "58e67d8e-900e-4d22-a4fd-fe493758d4f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2204.889036] env[62684]: DEBUG oslo_concurrency.lockutils [req-d5db269d-cbc6-4cd7-a0f8-9aae3a8897ba req-828af565-5fa8-42b1-88a3-a0c39c1247b3 service nova] Lock "58e67d8e-900e-4d22-a4fd-fe493758d4f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2204.889257] env[62684]: DEBUG oslo_concurrency.lockutils [req-d5db269d-cbc6-4cd7-a0f8-9aae3a8897ba req-828af565-5fa8-42b1-88a3-a0c39c1247b3 service nova] Lock "58e67d8e-900e-4d22-a4fd-fe493758d4f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2204.889475] env[62684]: DEBUG nova.compute.manager [req-d5db269d-cbc6-4cd7-a0f8-9aae3a8897ba req-828af565-5fa8-42b1-88a3-a0c39c1247b3 service nova] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] No waiting events found dispatching network-vif-plugged-e9e4e88f-b0c7-406a-b45c-3fd6cfcb854b {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2204.889666] env[62684]: WARNING nova.compute.manager [req-d5db269d-cbc6-4cd7-a0f8-9aae3a8897ba req-828af565-5fa8-42b1-88a3-a0c39c1247b3 service nova] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Received unexpected event network-vif-plugged-e9e4e88f-b0c7-406a-b45c-3fd6cfcb854b for instance with vm_state building and task_state spawning. [ 2204.889836] env[62684]: DEBUG nova.compute.manager [req-d5db269d-cbc6-4cd7-a0f8-9aae3a8897ba req-828af565-5fa8-42b1-88a3-a0c39c1247b3 service nova] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Received event network-changed-e9e4e88f-b0c7-406a-b45c-3fd6cfcb854b {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2204.889999] env[62684]: DEBUG nova.compute.manager [req-d5db269d-cbc6-4cd7-a0f8-9aae3a8897ba req-828af565-5fa8-42b1-88a3-a0c39c1247b3 service nova] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Refreshing instance network info cache due to event network-changed-e9e4e88f-b0c7-406a-b45c-3fd6cfcb854b. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2204.890264] env[62684]: DEBUG oslo_concurrency.lockutils [req-d5db269d-cbc6-4cd7-a0f8-9aae3a8897ba req-828af565-5fa8-42b1-88a3-a0c39c1247b3 service nova] Acquiring lock "refresh_cache-58e67d8e-900e-4d22-a4fd-fe493758d4f2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2204.926079] env[62684]: DEBUG oslo_concurrency.lockutils [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "refresh_cache-d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2204.926782] env[62684]: DEBUG oslo_concurrency.lockutils [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2204.926947] env[62684]: DEBUG oslo_concurrency.lockutils [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2204.928049] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0159df75-43b7-43ff-838b-39c4d41c85a8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.944745] env[62684]: DEBUG nova.virt.hardware [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2204.944977] env[62684]: DEBUG nova.virt.hardware [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2204.945153] env[62684]: DEBUG nova.virt.hardware [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2204.945339] env[62684]: DEBUG nova.virt.hardware [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 2204.945487] env[62684]: DEBUG nova.virt.hardware [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2204.945636] env[62684]: DEBUG nova.virt.hardware [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2204.945841] env[62684]: DEBUG nova.virt.hardware [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2204.946013] env[62684]: DEBUG nova.virt.hardware [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2204.946188] env[62684]: DEBUG nova.virt.hardware [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2204.946357] env[62684]: DEBUG nova.virt.hardware [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2204.946530] env[62684]: DEBUG nova.virt.hardware [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2204.952669] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Reconfiguring VM to attach interface {{(pid=62684) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 2204.953327] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a175aca5-9043-4428-b463-d5f735851f25 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.971370] env[62684]: DEBUG oslo_vmware.api [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2204.971370] env[62684]: value = "task-2053551" [ 2204.971370] env[62684]: _type = "Task" [ 2204.971370] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2204.979526] env[62684]: DEBUG oslo_vmware.api [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053551, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2205.178108] env[62684]: DEBUG oslo_concurrency.lockutils [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Releasing lock "refresh_cache-58e67d8e-900e-4d22-a4fd-fe493758d4f2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2205.178601] env[62684]: DEBUG nova.compute.manager [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Instance network_info: |[{"id": "e9e4e88f-b0c7-406a-b45c-3fd6cfcb854b", "address": "fa:16:3e:4a:e8:95", "network": {"id": "d9fd767c-b907-4fb5-981d-fb0329dd5dfe", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1951802427-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c0a5eb59cda47c2b3f4c6d6a4e58bca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9e4e88f-b0", "ovs_interfaceid": "e9e4e88f-b0c7-406a-b45c-3fd6cfcb854b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2205.179063] env[62684]: DEBUG oslo_concurrency.lockutils [req-d5db269d-cbc6-4cd7-a0f8-9aae3a8897ba req-828af565-5fa8-42b1-88a3-a0c39c1247b3 service nova] Acquired lock "refresh_cache-58e67d8e-900e-4d22-a4fd-fe493758d4f2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2205.179428] env[62684]: DEBUG nova.network.neutron [req-d5db269d-cbc6-4cd7-a0f8-9aae3a8897ba req-828af565-5fa8-42b1-88a3-a0c39c1247b3 service nova] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Refreshing network info cache for port e9e4e88f-b0c7-406a-b45c-3fd6cfcb854b {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2205.180861] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:e8:95', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'415e68b4-3766-4359-afe2-f8563910d98c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e9e4e88f-b0c7-406a-b45c-3fd6cfcb854b', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2205.188337] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Creating folder: Project (3c0a5eb59cda47c2b3f4c6d6a4e58bca). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2205.188923] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f77fbcec-6c07-4c35-9975-8f188fd4abef {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.201729] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Created folder: Project (3c0a5eb59cda47c2b3f4c6d6a4e58bca) in parent group-v421118. [ 2205.202009] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Creating folder: Instances. Parent ref: group-v421384. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2205.202238] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3c1461f4-2bae-4ec7-a9f7-a57fc8a2ff70 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.213119] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Created folder: Instances in parent group-v421384. [ 2205.213425] env[62684]: DEBUG oslo.service.loopingcall [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2205.213753] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2205.214139] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-72a126d5-441a-4b0f-91f8-6b9b984dc49e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.235227] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2205.235227] env[62684]: value = "task-2053554" [ 2205.235227] env[62684]: _type = "Task" [ 2205.235227] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2205.249543] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053554, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2205.255406] env[62684]: DEBUG oslo_vmware.api [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053549, 'name': ReconfigVM_Task, 'duration_secs': 1.002916} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2205.255778] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Reconfigured VM instance instance-00000064 to attach disk [datastore2] 02480039-f749-402a-92db-df664304a5bf/02480039-f749-402a-92db-df664304a5bf.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2205.256517] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-45445b4e-7922-4005-85c5-aa80a844d2e1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.264142] env[62684]: DEBUG oslo_vmware.api [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2205.264142] env[62684]: value = "task-2053555" [ 2205.264142] env[62684]: _type = "Task" [ 2205.264142] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2205.273915] env[62684]: DEBUG oslo_vmware.api [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053555, 'name': Rename_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2205.364775] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2205.365081] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2205.365386] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2205.365617] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2205.365875] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2205.368394] env[62684]: INFO nova.compute.manager [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Terminating instance [ 2205.370368] env[62684]: DEBUG nova.compute.manager [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2205.370617] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2205.372162] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe63168b-fe89-4387-82eb-f4d7ca43ed27 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.380444] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2205.383332] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7ca9a865-f04f-43c9-930f-5f0a4ff7b711 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.385418] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 153 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2205.385615] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 153 to 154 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2205.385814] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2205.394743] env[62684]: DEBUG oslo_vmware.api [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Task: {'id': task-2053550, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480834} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2205.396170] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 3ff55331-6d5c-4558-b932-e266670f2ac9/3ff55331-6d5c-4558-b932-e266670f2ac9.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2205.396406] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2205.397110] env[62684]: DEBUG oslo_vmware.api [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2205.397110] env[62684]: value = "task-2053556" [ 2205.397110] env[62684]: _type = "Task" [ 2205.397110] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2205.397472] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6ee0c95f-d915-46a1-afc7-c7dc5616ba41 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.410864] env[62684]: DEBUG oslo_vmware.api [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053556, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2205.412294] env[62684]: DEBUG oslo_vmware.api [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Waiting for the task: (returnval){ [ 2205.412294] env[62684]: value = "task-2053557" [ 2205.412294] env[62684]: _type = "Task" [ 2205.412294] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2205.420567] env[62684]: DEBUG oslo_vmware.api [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Task: {'id': task-2053557, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2205.481698] env[62684]: DEBUG oslo_vmware.api [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053551, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2205.745840] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053554, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2205.774908] env[62684]: DEBUG oslo_vmware.api [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053555, 'name': Rename_Task, 'duration_secs': 0.198908} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2205.775295] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2205.775611] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-777e5c57-67b2-4e14-9eb7-83672cfb3ede {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.784709] env[62684]: DEBUG oslo_vmware.api [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2205.784709] env[62684]: value = "task-2053558" [ 2205.784709] env[62684]: _type = "Task" [ 2205.784709] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2205.793319] env[62684]: DEBUG oslo_vmware.api [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053558, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2205.896358] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2205.896581] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.301s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2205.896887] env[62684]: DEBUG oslo_concurrency.lockutils [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.280s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2205.911511] env[62684]: DEBUG oslo_vmware.api [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053556, 'name': PowerOffVM_Task, 'duration_secs': 0.241783} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2205.913732] env[62684]: DEBUG nova.network.neutron [req-d5db269d-cbc6-4cd7-a0f8-9aae3a8897ba req-828af565-5fa8-42b1-88a3-a0c39c1247b3 service nova] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Updated VIF entry in instance network info cache for port e9e4e88f-b0c7-406a-b45c-3fd6cfcb854b. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2205.914134] env[62684]: DEBUG nova.network.neutron [req-d5db269d-cbc6-4cd7-a0f8-9aae3a8897ba req-828af565-5fa8-42b1-88a3-a0c39c1247b3 service nova] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Updating instance_info_cache with network_info: [{"id": "e9e4e88f-b0c7-406a-b45c-3fd6cfcb854b", "address": "fa:16:3e:4a:e8:95", "network": {"id": "d9fd767c-b907-4fb5-981d-fb0329dd5dfe", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1951802427-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c0a5eb59cda47c2b3f4c6d6a4e58bca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9e4e88f-b0", "ovs_interfaceid": "e9e4e88f-b0c7-406a-b45c-3fd6cfcb854b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2205.915857] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2205.916112] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2205.917396] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-24659e3a-9206-4531-9a6a-1df262663a78 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.929753] env[62684]: DEBUG oslo_vmware.api [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Task: {'id': task-2053557, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067965} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2205.930381] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2205.931507] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e2467d-70ea-44be-bfff-30228a713684 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.955269] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] 3ff55331-6d5c-4558-b932-e266670f2ac9/3ff55331-6d5c-4558-b932-e266670f2ac9.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2205.956947] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ba53c5a-27ef-4091-b54a-bf876ffed5fe {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.980045] env[62684]: DEBUG oslo_vmware.api [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Waiting for the task: (returnval){ [ 2205.980045] env[62684]: value = "task-2053560" [ 2205.980045] env[62684]: _type = "Task" [ 2205.980045] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2205.983509] env[62684]: DEBUG oslo_vmware.api [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053551, 'name': ReconfigVM_Task, 'duration_secs': 0.953881} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2205.987034] env[62684]: DEBUG oslo_concurrency.lockutils [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2205.987034] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Reconfigured VM to attach interface {{(pid=62684) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 2205.998045] env[62684]: DEBUG oslo_vmware.api [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Task: {'id': task-2053560, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2206.175197] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2206.175570] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2206.175876] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Deleting the datastore file [datastore1] ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2206.176211] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fee6b1f0-dd6a-4e69-80ce-cf92278f2bdd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.182636] env[62684]: DEBUG oslo_vmware.api [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for the task: (returnval){ [ 2206.182636] env[62684]: value = "task-2053561" [ 2206.182636] env[62684]: _type = "Task" [ 2206.182636] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2206.190954] env[62684]: DEBUG oslo_vmware.api [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053561, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2206.246227] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053554, 'name': CreateVM_Task, 'duration_secs': 0.79845} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2206.246388] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2206.247073] env[62684]: DEBUG oslo_concurrency.lockutils [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2206.247255] env[62684]: DEBUG oslo_concurrency.lockutils [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2206.247881] env[62684]: DEBUG oslo_concurrency.lockutils [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2206.247881] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d46291e6-a04b-48ca-91a9-6374da822e4b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.252376] env[62684]: DEBUG oslo_vmware.api [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Waiting for the task: (returnval){ [ 2206.252376] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5207deba-3299-1d4c-5b6a-8d3e5113fb0e" [ 2206.252376] env[62684]: _type = "Task" [ 2206.252376] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2206.260337] env[62684]: DEBUG oslo_vmware.api [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5207deba-3299-1d4c-5b6a-8d3e5113fb0e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2206.294642] env[62684]: DEBUG oslo_vmware.api [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053558, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2206.402715] env[62684]: INFO nova.compute.claims [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2206.418214] env[62684]: DEBUG oslo_concurrency.lockutils [req-d5db269d-cbc6-4cd7-a0f8-9aae3a8897ba req-828af565-5fa8-42b1-88a3-a0c39c1247b3 service nova] Releasing lock "refresh_cache-58e67d8e-900e-4d22-a4fd-fe493758d4f2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2206.418471] env[62684]: DEBUG nova.compute.manager [req-d5db269d-cbc6-4cd7-a0f8-9aae3a8897ba req-828af565-5fa8-42b1-88a3-a0c39c1247b3 service nova] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Received event network-changed-0d17b2ea-8e17-456b-87e2-1e2bec93f187 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2206.418655] env[62684]: DEBUG nova.compute.manager [req-d5db269d-cbc6-4cd7-a0f8-9aae3a8897ba req-828af565-5fa8-42b1-88a3-a0c39c1247b3 service nova] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Refreshing instance network info cache due to event network-changed-0d17b2ea-8e17-456b-87e2-1e2bec93f187. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2206.418909] env[62684]: DEBUG oslo_concurrency.lockutils [req-d5db269d-cbc6-4cd7-a0f8-9aae3a8897ba req-828af565-5fa8-42b1-88a3-a0c39c1247b3 service nova] Acquiring lock "refresh_cache-ba12fa9a-10e3-4624-98b5-4ff7365e1940" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2206.419047] env[62684]: DEBUG oslo_concurrency.lockutils [req-d5db269d-cbc6-4cd7-a0f8-9aae3a8897ba req-828af565-5fa8-42b1-88a3-a0c39c1247b3 service nova] Acquired lock "refresh_cache-ba12fa9a-10e3-4624-98b5-4ff7365e1940" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2206.419227] env[62684]: DEBUG nova.network.neutron [req-d5db269d-cbc6-4cd7-a0f8-9aae3a8897ba req-828af565-5fa8-42b1-88a3-a0c39c1247b3 service nova] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Refreshing network info cache for port 0d17b2ea-8e17-456b-87e2-1e2bec93f187 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2206.475795] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0bff8640-da66-4a02-8617-c9e18d8c1b5c tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "99a9653c-7221-4495-be5f-5441dc8da0f4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2206.476114] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0bff8640-da66-4a02-8617-c9e18d8c1b5c tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "99a9653c-7221-4495-be5f-5441dc8da0f4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2206.476711] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0bff8640-da66-4a02-8617-c9e18d8c1b5c 
tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "99a9653c-7221-4495-be5f-5441dc8da0f4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2206.476966] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0bff8640-da66-4a02-8617-c9e18d8c1b5c tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "99a9653c-7221-4495-be5f-5441dc8da0f4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2206.477216] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0bff8640-da66-4a02-8617-c9e18d8c1b5c tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "99a9653c-7221-4495-be5f-5441dc8da0f4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2206.479780] env[62684]: INFO nova.compute.manager [None req-0bff8640-da66-4a02-8617-c9e18d8c1b5c tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Terminating instance [ 2206.481901] env[62684]: DEBUG nova.compute.manager [None req-0bff8640-da66-4a02-8617-c9e18d8c1b5c tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2206.482123] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0bff8640-da66-4a02-8617-c9e18d8c1b5c tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2206.482968] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4806b76c-f05b-424a-b715-18485ef4a3ce {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.491427] env[62684]: DEBUG oslo_concurrency.lockutils [None req-391b6161-b6e5-436d-93b6-a226667cae71 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "interface-d7f09d0e-f7b6-415e-8d82-47eba1153aa1-e60b8515-c469-46d3-945b-bc843ccffc44" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.649s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2206.497631] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0bff8640-da66-4a02-8617-c9e18d8c1b5c tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2206.498165] env[62684]: DEBUG oslo_vmware.api [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Task: {'id': task-2053560, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2206.498573] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a2803e72-a645-42f9-9472-547653965c91 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.692331] env[62684]: DEBUG oslo_vmware.api [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Task: {'id': task-2053561, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.358757} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2206.692584] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2206.692776] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2206.692971] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2206.693148] env[62684]: INFO nova.compute.manager [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Took 1.32 seconds to destroy the instance on the hypervisor. [ 2206.693395] env[62684]: DEBUG oslo.service.loopingcall [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2206.693599] env[62684]: DEBUG nova.compute.manager [-] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2206.693730] env[62684]: DEBUG nova.network.neutron [-] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2206.708113] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0bff8640-da66-4a02-8617-c9e18d8c1b5c tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2206.708541] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0bff8640-da66-4a02-8617-c9e18d8c1b5c tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2206.708910] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bff8640-da66-4a02-8617-c9e18d8c1b5c tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Deleting the datastore file [datastore2] 99a9653c-7221-4495-be5f-5441dc8da0f4 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2206.709367] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f839e152-550a-45ad-ac85-d8020cd15ae3 {{(pid=62684) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.716455] env[62684]: DEBUG oslo_vmware.api [None req-0bff8640-da66-4a02-8617-c9e18d8c1b5c tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2206.716455] env[62684]: value = "task-2053563" [ 2206.716455] env[62684]: _type = "Task" [ 2206.716455] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2206.724385] env[62684]: DEBUG oslo_vmware.api [None req-0bff8640-da66-4a02-8617-c9e18d8c1b5c tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053563, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2206.765833] env[62684]: DEBUG oslo_vmware.api [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5207deba-3299-1d4c-5b6a-8d3e5113fb0e, 'name': SearchDatastore_Task, 'duration_secs': 0.022668} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2206.766178] env[62684]: DEBUG oslo_concurrency.lockutils [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2206.766472] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2206.766716] env[62684]: DEBUG oslo_concurrency.lockutils [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2206.766870] env[62684]: DEBUG oslo_concurrency.lockutils [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2206.767077] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2206.767356] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fb907a31-bcf1-458c-83af-f4415cef94f6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.775355] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2206.775536] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2206.776262] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02c0cfdf-6651-48cb-a314-5a479cf59696 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.781552] env[62684]: DEBUG oslo_vmware.api [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Waiting for the task: (returnval){ [ 2206.781552] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a41603-a3fa-f94a-48db-4d5856fdaeda" [ 2206.781552] env[62684]: _type = "Task" [ 2206.781552] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2206.792622] env[62684]: DEBUG oslo_vmware.api [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a41603-a3fa-f94a-48db-4d5856fdaeda, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2206.797433] env[62684]: DEBUG oslo_vmware.api [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053558, 'name': PowerOnVM_Task, 'duration_secs': 0.769287} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2206.797667] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2206.797777] env[62684]: INFO nova.compute.manager [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Took 8.85 seconds to spawn the instance on the hypervisor. 
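The repeated "Waiting for the task: (returnval){ value = "task-..." }" / "progress is N%" / "completed successfully" sequences above come from oslo.vmware's task polling loop. The following is a minimal, self-contained sketch of that wait-and-poll pattern; get_task_info and its returned object are placeholders for the real task query, not the oslo.vmware API.

import time


def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
    """Poll a vCenter-style task until it reaches a terminal state.

    get_task_info is a placeholder callable standing in for the task
    lookup the driver performs; it is assumed to return an object with
    `state` ('running', 'success', 'error') and `progress` (0-100).
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == "success":
            return info                      # logged as "completed successfully"
        if info.state == "error":
            raise RuntimeError("task failed: %r" % getattr(info, "error", None))
        # Produces the periodic "progress is N%" lines seen above.
        print("progress is %s%%" % (info.progress or 0))
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete within %ss" % timeout)

Polling at a fixed interval is what gives the evenly spaced progress entries that bracket every DeleteDatastoreFile_Task, CopyVirtualDisk_Task and PowerOnVM_Task in this section.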
[ 2206.797962] env[62684]: DEBUG nova.compute.manager [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2206.798771] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eca376e-43cd-4ae7-b3d2-6bbe04774424 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.910083] env[62684]: INFO nova.compute.resource_tracker [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Updating resource usage from migration 3ac08fc8-b87f-4785-ac54-acacbaf5dfc4 [ 2206.996740] env[62684]: DEBUG oslo_vmware.api [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Task: {'id': task-2053560, 'name': ReconfigVM_Task, 'duration_secs': 0.859906} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2206.997151] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Reconfigured VM instance instance-00000065 to attach disk [datastore2] 3ff55331-6d5c-4558-b932-e266670f2ac9/3ff55331-6d5c-4558-b932-e266670f2ac9.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2206.998082] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ac237e57-79c8-4124-bae3-152f9a450235 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.005567] env[62684]: DEBUG oslo_vmware.api [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Waiting for the task: (returnval){ [ 2207.005567] env[62684]: value = "task-2053564" [ 2207.005567] env[62684]: _type = "Task" [ 2207.005567] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2207.014317] env[62684]: DEBUG oslo_vmware.api [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Task: {'id': task-2053564, 'name': Rename_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2207.135468] env[62684]: DEBUG nova.compute.manager [req-96c853b3-74db-4c82-b3a5-a1184d36cf1a req-30ec6d7d-9037-4d0f-9a16-18c3da9b018a service nova] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Received event network-vif-deleted-5d09e22d-005f-49a7-8c55-7d69dfd47687 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2207.135679] env[62684]: INFO nova.compute.manager [req-96c853b3-74db-4c82-b3a5-a1184d36cf1a req-30ec6d7d-9037-4d0f-9a16-18c3da9b018a service nova] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Neutron deleted interface 5d09e22d-005f-49a7-8c55-7d69dfd47687; detaching it from the instance and deleting it from the info cache [ 2207.135866] env[62684]: DEBUG nova.network.neutron [req-96c853b3-74db-4c82-b3a5-a1184d36cf1a req-30ec6d7d-9037-4d0f-9a16-18c3da9b018a service nova] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2207.226992] env[62684]: DEBUG oslo_vmware.api [None req-0bff8640-da66-4a02-8617-c9e18d8c1b5c tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053563, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149069} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2207.227329] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bff8640-da66-4a02-8617-c9e18d8c1b5c tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2207.227709] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0bff8640-da66-4a02-8617-c9e18d8c1b5c tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2207.227946] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0bff8640-da66-4a02-8617-c9e18d8c1b5c tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2207.228142] env[62684]: INFO nova.compute.manager [None req-0bff8640-da66-4a02-8617-c9e18d8c1b5c tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Took 0.75 seconds to destroy the instance on the hypervisor. [ 2207.228416] env[62684]: DEBUG oslo.service.loopingcall [None req-0bff8640-da66-4a02-8617-c9e18d8c1b5c tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2207.228634] env[62684]: DEBUG nova.compute.manager [-] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2207.228740] env[62684]: DEBUG nova.network.neutron [-] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2207.247184] env[62684]: DEBUG nova.network.neutron [req-d5db269d-cbc6-4cd7-a0f8-9aae3a8897ba req-828af565-5fa8-42b1-88a3-a0c39c1247b3 service nova] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Updated VIF entry in instance network info cache for port 0d17b2ea-8e17-456b-87e2-1e2bec93f187. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2207.247566] env[62684]: DEBUG nova.network.neutron [req-d5db269d-cbc6-4cd7-a0f8-9aae3a8897ba req-828af565-5fa8-42b1-88a3-a0c39c1247b3 service nova] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Updating instance_info_cache with network_info: [{"id": "0d17b2ea-8e17-456b-87e2-1e2bec93f187", "address": "fa:16:3e:1c:c6:37", "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-120070593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "263c101fcc5e493789b79dfd1ba97cc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d17b2ea-8e", "ovs_interfaceid": "0d17b2ea-8e17-456b-87e2-1e2bec93f187", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2207.258919] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38ff7f63-5cd2-4562-81ae-6808cdc2ef05 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.266934] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52c292db-e915-44ee-b287-6b4f746d217c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.302612] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba9c063-adee-4a6c-b0e1-e137b0533a71 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.315301] env[62684]: DEBUG oslo_vmware.api [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 
tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a41603-a3fa-f94a-48db-4d5856fdaeda, 'name': SearchDatastore_Task, 'duration_secs': 0.009769} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2207.319858] env[62684]: INFO nova.compute.manager [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Took 18.20 seconds to build instance. [ 2207.321577] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a0b5d35-8f35-4d37-963f-49d376ca57d6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.327205] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbab79ea-4a8c-4355-ab2a-1c0da30aa8e7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.336093] env[62684]: DEBUG oslo_vmware.api [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Waiting for the task: (returnval){ [ 2207.336093] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523eda71-6509-0bac-d35f-cda7526f89bb" [ 2207.336093] env[62684]: _type = "Task" [ 2207.336093] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2207.344604] env[62684]: DEBUG nova.compute.provider_tree [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2207.354687] env[62684]: DEBUG oslo_vmware.api [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523eda71-6509-0bac-d35f-cda7526f89bb, 'name': SearchDatastore_Task, 'duration_secs': 0.009742} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2207.355467] env[62684]: DEBUG oslo_concurrency.lockutils [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2207.355784] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 58e67d8e-900e-4d22-a4fd-fe493758d4f2/58e67d8e-900e-4d22-a4fd-fe493758d4f2.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2207.356066] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d545641d-1a90-4200-a9d6-74cd6a789ec7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.368060] env[62684]: DEBUG oslo_vmware.api [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Waiting for the task: (returnval){ [ 2207.368060] env[62684]: value = "task-2053565" [ 2207.368060] env[62684]: _type = "Task" [ 2207.368060] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2207.377500] env[62684]: DEBUG oslo_vmware.api [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Task: {'id': task-2053565, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2207.452808] env[62684]: DEBUG nova.network.neutron [-] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2207.516928] env[62684]: DEBUG oslo_vmware.api [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Task: {'id': task-2053564, 'name': Rename_Task, 'duration_secs': 0.370082} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2207.517359] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2207.518075] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e99b1ab0-7627-423b-bed6-902b66503ad3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.526073] env[62684]: DEBUG oslo_vmware.api [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Waiting for the task: (returnval){ [ 2207.526073] env[62684]: value = "task-2053566" [ 2207.526073] env[62684]: _type = "Task" [ 2207.526073] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2207.537980] env[62684]: DEBUG oslo_vmware.api [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Task: {'id': task-2053566, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2207.642782] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-606debf5-facf-4f0b-88f4-522813d3bdff {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.654227] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee13faf6-8cc9-41c0-9311-7667b2e6d64d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.695776] env[62684]: DEBUG nova.compute.manager [req-96c853b3-74db-4c82-b3a5-a1184d36cf1a req-30ec6d7d-9037-4d0f-9a16-18c3da9b018a service nova] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Detach interface failed, port_id=5d09e22d-005f-49a7-8c55-7d69dfd47687, reason: Instance ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1 could not be found. 
{{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2207.750430] env[62684]: DEBUG oslo_concurrency.lockutils [req-d5db269d-cbc6-4cd7-a0f8-9aae3a8897ba req-828af565-5fa8-42b1-88a3-a0c39c1247b3 service nova] Releasing lock "refresh_cache-ba12fa9a-10e3-4624-98b5-4ff7365e1940" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2207.832216] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f3128b84-56a5-42f6-9435-2a2fe70c5479 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "02480039-f749-402a-92db-df664304a5bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.721s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2207.850745] env[62684]: DEBUG nova.scheduler.client.report [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2207.880689] env[62684]: DEBUG oslo_vmware.api [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Task: {'id': task-2053565, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2207.956399] env[62684]: INFO nova.compute.manager [-] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Took 1.26 seconds to deallocate network for instance. [ 2208.035920] env[62684]: DEBUG oslo_vmware.api [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Task: {'id': task-2053566, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2208.104532] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "interface-d7f09d0e-f7b6-415e-8d82-47eba1153aa1-e60b8515-c469-46d3-945b-bc843ccffc44" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2208.104827] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "interface-d7f09d0e-f7b6-415e-8d82-47eba1153aa1-e60b8515-c469-46d3-945b-bc843ccffc44" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2208.224807] env[62684]: DEBUG nova.network.neutron [-] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2208.356842] env[62684]: DEBUG oslo_concurrency.lockutils [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.460s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2208.356965] env[62684]: INFO nova.compute.manager [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Migrating [ 2208.363624] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 3.507s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2208.363873] env[62684]: DEBUG nova.objects.instance [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62684) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2208.383323] env[62684]: DEBUG oslo_vmware.api [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Task: {'id': task-2053565, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.517323} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2208.383525] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 58e67d8e-900e-4d22-a4fd-fe493758d4f2/58e67d8e-900e-4d22-a4fd-fe493758d4f2.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2208.383750] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2208.384014] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1aa1c4e6-72ac-4688-9f38-87c1d7b74692 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.392054] env[62684]: DEBUG oslo_vmware.api [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Waiting for the task: (returnval){ [ 2208.392054] env[62684]: value = "task-2053567" [ 2208.392054] env[62684]: _type = "Task" [ 2208.392054] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2208.401068] env[62684]: DEBUG oslo_vmware.api [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Task: {'id': task-2053567, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2208.451288] env[62684]: DEBUG oslo_concurrency.lockutils [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "02480039-f749-402a-92db-df664304a5bf" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2208.451572] env[62684]: DEBUG oslo_concurrency.lockutils [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "02480039-f749-402a-92db-df664304a5bf" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2208.451795] env[62684]: INFO nova.compute.manager [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Shelving [ 2208.463189] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2208.537355] env[62684]: DEBUG oslo_vmware.api [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Task: {'id': task-2053566, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2208.608070] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2208.608293] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2208.609259] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8715eb53-302a-49c8-bcc6-1b7d8b373da5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.628878] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7129bc0b-0628-4452-bd66-7ffe0e599355 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.655570] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Reconfiguring VM to detach interface {{(pid=62684) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 2208.655866] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81f4f297-8e23-4005-b02a-d20346dbee68 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.675436] env[62684]: DEBUG oslo_vmware.api [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2208.675436] env[62684]: value = "task-2053568" [ 2208.675436] env[62684]: _type = "Task" [ 2208.675436] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2208.683919] env[62684]: DEBUG oslo_vmware.api [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053568, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2208.727109] env[62684]: INFO nova.compute.manager [-] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Took 1.50 seconds to deallocate network for instance. 
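The 'Acquiring lock "..." by "..."' / 'acquired ... :: waited 0.000s' / '"released" ... :: held N.NNNs' lines throughout this section are the timing output of oslo.concurrency's named locks. Below is a rough, stdlib-only sketch of the same semantics (time spent waiting for the lock versus time the lock was held); it is illustrative only and not the lockutils implementation.

import contextlib
import threading
import time

_locks = {}
_locks_guard = threading.Lock()


@contextlib.contextmanager
def timed_lock(name, caller):
    """Acquire a named lock and report waited/held times, as in the log."""
    with _locks_guard:
        lock = _locks.setdefault(name, threading.Lock())
    print('Acquiring lock "%s" by "%s"' % (name, caller))
    start = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    print('Lock "%s" acquired by "%s" :: waited %.3fs'
          % (name, caller, acquired - start))
    try:
        yield
    finally:
        lock.release()
        print('Lock "%s" "released" by "%s" :: held %.3fs'
              % (name, caller, time.monotonic() - acquired))

Used as, say, `with timed_lock("compute_resources", "ResourceTracker.update_usage"): ...`, this reproduces the waited/held bookkeeping visible around the resource tracker and refresh_cache locks above.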
[ 2208.878757] env[62684]: DEBUG oslo_concurrency.lockutils [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "refresh_cache-264c6900-dbef-455e-95cc-1df73c735cc8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2208.878757] env[62684]: DEBUG oslo_concurrency.lockutils [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired lock "refresh_cache-264c6900-dbef-455e-95cc-1df73c735cc8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2208.878757] env[62684]: DEBUG nova.network.neutron [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2208.904771] env[62684]: DEBUG oslo_vmware.api [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Task: {'id': task-2053567, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.313759} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2208.905110] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2208.905903] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d871d5a0-4656-4633-82ee-57f8d62a3a26 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.928656] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] 58e67d8e-900e-4d22-a4fd-fe493758d4f2/58e67d8e-900e-4d22-a4fd-fe493758d4f2.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2208.929322] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-911307c8-9175-42ba-a34b-64ef824e0952 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.950145] env[62684]: DEBUG oslo_vmware.api [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Waiting for the task: (returnval){ [ 2208.950145] env[62684]: value = "task-2053569" [ 2208.950145] env[62684]: _type = "Task" [ 2208.950145] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2208.959427] env[62684]: DEBUG oslo_vmware.api [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Task: {'id': task-2053569, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2208.961928] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2208.962206] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3c4d6a74-b862-4d97-b6ce-928a916c6bc4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.968159] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2208.968159] env[62684]: value = "task-2053570" [ 2208.968159] env[62684]: _type = "Task" [ 2208.968159] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2208.976281] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053570, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2209.036542] env[62684]: DEBUG oslo_vmware.api [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Task: {'id': task-2053566, 'name': PowerOnVM_Task, 'duration_secs': 1.028473} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2209.036850] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2209.037174] env[62684]: INFO nova.compute.manager [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Took 8.73 seconds to spawn the instance on the hypervisor. 
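Taken together, the SearchDatastore_Task, MakeDirectory, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task and PowerOnVM_Task invocations for instance 58e67d8e-900e-4d22-a4fd-fe493758d4f2 trace the spawn-from-image-cache path. The outline below is a simplified, hypothetical rendering of that sequence; every method on `tasks` is a placeholder for the corresponding vSphere task, not a real Nova or oslo.vmware call.

def spawn_from_image_cache(tasks, instance_uuid, image_id, datastore, root_gb):
    """Simplified outline of the spawn path visible in the log above.

    `tasks` is assumed to expose one placeholder method per vSphere task
    the log shows being driven (SearchDatastore_Task, CopyVirtualDisk_Task,
    ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task).
    """
    cache_path = "[%s] devstack-image-cache_base/%s/%s.vmdk" % (
        datastore, image_id, image_id)
    instance_path = "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)

    # 1. Check whether the base image is already in the datastore cache.
    if not tasks.search_datastore(cache_path):
        tasks.fetch_image_to_cache(image_id, cache_path)

    # 2. Copy the cached base disk into the instance's folder.
    tasks.copy_virtual_disk(cache_path, instance_path)

    # 3. Extend the root disk to the flavor size, in KB
    #    (1 GB -> 1048576, matching "Extending root virtual disk to 1048576").
    tasks.extend_virtual_disk(instance_path, root_gb * 1024 * 1024)

    # 4. Attach the disk, rename the VM to its final name, power it on.
    tasks.reconfigure_vm_attach_disk(instance_uuid, instance_path)
    tasks.rename_vm(instance_uuid)
    tasks.power_on_vm(instance_uuid)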
[ 2209.037487] env[62684]: DEBUG nova.compute.manager [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2209.038551] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd1b1ff3-34c8-462c-9836-ba762ec327e0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.187047] env[62684]: DEBUG oslo_vmware.api [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053568, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2209.232764] env[62684]: DEBUG nova.compute.manager [req-68837d2c-9691-49b8-a1a0-9e8a83ce64c5 req-d3f71fed-d968-4206-bc2e-74772445fb1d service nova] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Received event network-vif-deleted-fe223d4f-0585-454f-b724-0cdff1d2ceea {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2209.234415] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0bff8640-da66-4a02-8617-c9e18d8c1b5c tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2209.379490] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fbe2dbd9-6af0-41d5-8fdf-420c3cb84300 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2209.380654] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.918s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2209.380923] env[62684]: DEBUG nova.objects.instance [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lazy-loading 'resources' on Instance uuid ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2209.401084] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ec4536ad-6988-4d77-ae22-806ca624acc6 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "584845d2-d146-42bf-8ef5-58532fe24f65" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2209.401330] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ec4536ad-6988-4d77-ae22-806ca624acc6 tempest-AttachVolumeTestJSON-181164161 
tempest-AttachVolumeTestJSON-181164161-project-member] Lock "584845d2-d146-42bf-8ef5-58532fe24f65" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2209.459992] env[62684]: DEBUG oslo_vmware.api [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Task: {'id': task-2053569, 'name': ReconfigVM_Task, 'duration_secs': 0.295186} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2209.460369] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Reconfigured VM instance instance-00000066 to attach disk [datastore2] 58e67d8e-900e-4d22-a4fd-fe493758d4f2/58e67d8e-900e-4d22-a4fd-fe493758d4f2.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2209.460974] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9ffaad31-6e1d-475f-b799-7f265522d982 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.467764] env[62684]: DEBUG oslo_vmware.api [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Waiting for the task: (returnval){ [ 2209.467764] env[62684]: value = "task-2053571" [ 2209.467764] env[62684]: _type = "Task" [ 2209.467764] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2209.480733] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053570, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2209.480919] env[62684]: DEBUG oslo_vmware.api [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Task: {'id': task-2053571, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2209.557353] env[62684]: INFO nova.compute.manager [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Took 16.72 seconds to build instance. 
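Editor's note: the "Acquiring lock ... / Lock ... acquired ... waited Ns / Lock ... released ... held Ns" triplets throughout this section come from oslo.concurrency's lockutils wrapper (the "inner" frames in the logged paths), which Nova uses to serialize per-resource and per-instance work such as the "compute_resources" and instance-UUID locks above. A minimal sketch of that usage; the lock name and guarded function are illustrative placeholders, not code lifted from Nova:

    # Sketch of the locking pattern behind the acquire/release records above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage(tracker, instance):
        # One caller at a time per worker; the decorator's wrapper logs the
        # acquire, the wait time, and the hold time on release.
        tracker.record(instance)

    # The same primitive is also available as a context manager:
    with lockutils.lock('compute_resources'):
        pass  # critical section
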
[ 2209.579647] env[62684]: DEBUG nova.network.neutron [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Updating instance_info_cache with network_info: [{"id": "34ae3a7e-83a9-4ebd-8582-bb73f3050948", "address": "fa:16:3e:fc:ef:f4", "network": {"id": "7678b347-6a54-4b84-9a4d-b566bbeb1ea4", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-51664912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d88ac878d44480b3b54b24ab87efa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34ae3a7e-83", "ovs_interfaceid": "34ae3a7e-83a9-4ebd-8582-bb73f3050948", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2209.686626] env[62684]: DEBUG oslo_vmware.api [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053568, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2209.904461] env[62684]: DEBUG nova.compute.utils [None req-ec4536ad-6988-4d77-ae22-806ca624acc6 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2209.980431] env[62684]: DEBUG oslo_vmware.api [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Task: {'id': task-2053571, 'name': Rename_Task, 'duration_secs': 0.133835} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2209.980712] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2209.980983] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4aa7cd15-1621-4828-935d-504853fa550a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.984935] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053570, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2209.992309] env[62684]: DEBUG oslo_vmware.api [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Waiting for the task: (returnval){ [ 2209.992309] env[62684]: value = "task-2053572" [ 2209.992309] env[62684]: _type = "Task" [ 2209.992309] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2209.999734] env[62684]: DEBUG oslo_vmware.api [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Task: {'id': task-2053572, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2210.059681] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2ce59a62-018a-4b4c-b3c6-a79525d5fc14 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Lock "3ff55331-6d5c-4558-b932-e266670f2ac9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.238s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2210.082737] env[62684]: DEBUG oslo_concurrency.lockutils [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Releasing lock "refresh_cache-264c6900-dbef-455e-95cc-1df73c735cc8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2210.125777] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac840e2d-7683-49fa-ae32-7bee91d52c6b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.135621] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5350488b-5dfe-486b-a58a-4924f76693cd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.165626] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d4af2c-d6e3-4725-9aab-e2fb3c6bfc1d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.174609] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-669c53a4-8183-456d-af78-838178e39764 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.193344] env[62684]: DEBUG nova.compute.provider_tree [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2210.201351] env[62684]: DEBUG oslo_vmware.api [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053568, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2210.410033] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ec4536ad-6988-4d77-ae22-806ca624acc6 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "584845d2-d146-42bf-8ef5-58532fe24f65" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2210.484245] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053570, 'name': PowerOffVM_Task, 'duration_secs': 1.290228} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2210.484245] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2210.484783] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-171d284e-52d6-4b2e-878d-5c1cfc3efb69 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.508199] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cea910b-61c5-400c-aed8-277cf5b73d4a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.519890] env[62684]: DEBUG oslo_vmware.api [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Task: {'id': task-2053572, 'name': PowerOnVM_Task, 'duration_secs': 0.525232} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2210.522462] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2210.522700] env[62684]: INFO nova.compute.manager [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Took 7.82 seconds to spawn the instance on the hypervisor. [ 2210.522868] env[62684]: DEBUG nova.compute.manager [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2210.523829] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b8b76ea-44cc-44ee-b50e-059275dbe0ee {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.691563] env[62684]: DEBUG oslo_vmware.api [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053568, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2210.703685] env[62684]: DEBUG nova.scheduler.client.report [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2211.028237] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Creating Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2211.028605] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b4c178a2-3d97-4806-a178-197e8c7c6a9d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.040282] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2211.040282] env[62684]: value = "task-2053573" [ 2211.040282] env[62684]: _type = "Task" [ 2211.040282] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2211.046796] env[62684]: INFO nova.compute.manager [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Took 16.99 seconds to build instance. [ 2211.053990] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053573, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2211.192511] env[62684]: DEBUG oslo_vmware.api [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053568, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2211.209513] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.829s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2211.212281] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0bff8640-da66-4a02-8617-c9e18d8c1b5c tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.978s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2211.212658] env[62684]: DEBUG nova.objects.instance [None req-0bff8640-da66-4a02-8617-c9e18d8c1b5c tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lazy-loading 'resources' on Instance uuid 99a9653c-7221-4495-be5f-5441dc8da0f4 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2211.234840] env[62684]: INFO nova.scheduler.client.report [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Deleted allocations for instance ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1 [ 2211.474803] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ec4536ad-6988-4d77-ae22-806ca624acc6 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "584845d2-d146-42bf-8ef5-58532fe24f65" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2211.475302] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ec4536ad-6988-4d77-ae22-806ca624acc6 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "584845d2-d146-42bf-8ef5-58532fe24f65" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2211.475678] env[62684]: INFO nova.compute.manager [None req-ec4536ad-6988-4d77-ae22-806ca624acc6 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Attaching volume 93dd9c99-f5f4-43f7-8461-d3e47a775078 to /dev/sdb [ 2211.515101] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b286a33-1119-4fd9-a25d-f950315cb58a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.523591] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e14c1355-87e0-4fde-ac4d-9c75b7cce255 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.538578] env[62684]: DEBUG nova.virt.block_device [None req-ec4536ad-6988-4d77-ae22-806ca624acc6 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Updating 
existing volume attachment record: bf61d0a0-0b46-49a4-ae7c-0fe830ca93a2 {{(pid=62684) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2211.549156] env[62684]: DEBUG oslo_concurrency.lockutils [None req-43e61e8f-6e1a-4295-9b58-7a852c78aa7b tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Lock "58e67d8e-900e-4d22-a4fd-fe493758d4f2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.498s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2211.549498] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053573, 'name': CreateSnapshot_Task, 'duration_secs': 0.47663} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2211.549779] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Created Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2211.550658] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32fe6ad0-5fb2-4ff4-ac5a-605246443768 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.601018] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7693dd95-0e97-475a-8fd1-779af1068c22 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.620329] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Updating instance '264c6900-dbef-455e-95cc-1df73c735cc8' progress to 0 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2211.695686] env[62684]: DEBUG oslo_vmware.api [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053568, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2211.743100] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0e15a31-e358-4807-8450-d6365716a5ca tempest-ServersTestJSON-828328252 tempest-ServersTestJSON-828328252-project-member] Lock "ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.378s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2211.795832] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Acquiring lock "3ff55331-6d5c-4558-b932-e266670f2ac9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2211.796138] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Lock "3ff55331-6d5c-4558-b932-e266670f2ac9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2211.796385] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Acquiring lock "3ff55331-6d5c-4558-b932-e266670f2ac9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2211.796604] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Lock "3ff55331-6d5c-4558-b932-e266670f2ac9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2211.796810] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Lock "3ff55331-6d5c-4558-b932-e266670f2ac9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2211.800982] env[62684]: INFO nova.compute.manager [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Terminating instance [ 2211.805803] env[62684]: DEBUG nova.compute.manager [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2211.805990] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2211.806925] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0e87486-1aac-4968-91b1-6fb819662354 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.815293] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2211.815480] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ea07d14f-9552-4cac-aba8-87fe203bcdd9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.821476] env[62684]: DEBUG oslo_vmware.api [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Waiting for the task: (returnval){ [ 2211.821476] env[62684]: value = "task-2053575" [ 2211.821476] env[62684]: _type = "Task" [ 2211.821476] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2211.831670] env[62684]: DEBUG oslo_vmware.api [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Task: {'id': task-2053575, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2211.945607] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4e7001c-2584-4963-97fa-0162a3b328f2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.953645] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f88745fb-fea5-45c7-8221-66ff77bfb06d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.985670] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44eb2ff2-c3f2-464c-a1f1-2c01a1f6d3d9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.993653] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072ea03d-78dd-48fe-b158-4e35606e80e9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.007309] env[62684]: DEBUG nova.compute.provider_tree [None req-0bff8640-da66-4a02-8617-c9e18d8c1b5c tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2212.068605] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Creating linked-clone VM from snapshot {{(pid=62684) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2212.068959] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5d2ba88a-7aca-404f-9eca-232464ebb968 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.077136] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2212.077136] env[62684]: value = "task-2053576" [ 2212.077136] env[62684]: _type = "Task" [ 2212.077136] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2212.085832] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053576, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2212.126817] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2212.127204] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6ac3be17-fb4d-4b56-b442-84c933a398f6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.134221] env[62684]: DEBUG oslo_vmware.api [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2212.134221] env[62684]: value = "task-2053577" [ 2212.134221] env[62684]: _type = "Task" [ 2212.134221] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2212.142519] env[62684]: DEBUG oslo_vmware.api [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053577, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2212.197159] env[62684]: DEBUG oslo_vmware.api [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053568, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2212.274636] env[62684]: DEBUG oslo_concurrency.lockutils [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Acquiring lock "58e67d8e-900e-4d22-a4fd-fe493758d4f2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2212.275368] env[62684]: DEBUG oslo_concurrency.lockutils [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Lock "58e67d8e-900e-4d22-a4fd-fe493758d4f2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2212.275761] env[62684]: DEBUG oslo_concurrency.lockutils [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Acquiring lock "58e67d8e-900e-4d22-a4fd-fe493758d4f2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2212.276059] env[62684]: DEBUG oslo_concurrency.lockutils [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Lock "58e67d8e-900e-4d22-a4fd-fe493758d4f2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2212.276335] env[62684]: DEBUG oslo_concurrency.lockutils [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Lock "58e67d8e-900e-4d22-a4fd-fe493758d4f2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2212.278907] env[62684]: INFO nova.compute.manager [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Terminating instance [ 2212.283162] env[62684]: DEBUG nova.compute.manager [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2212.283432] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2212.284706] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e349ad2e-e456-4867-b08b-43e3a518368d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.293212] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2212.293519] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fde5955a-a633-440c-a2d2-26aafe517365 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.300142] env[62684]: DEBUG oslo_vmware.api [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Waiting for the task: (returnval){ [ 2212.300142] env[62684]: value = "task-2053578" [ 2212.300142] env[62684]: _type = "Task" [ 2212.300142] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2212.308268] env[62684]: DEBUG oslo_vmware.api [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Task: {'id': task-2053578, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2212.333512] env[62684]: DEBUG oslo_vmware.api [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Task: {'id': task-2053575, 'name': PowerOffVM_Task, 'duration_secs': 0.21636} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2212.333889] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2212.334097] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2212.334418] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2728b12c-9777-47cc-ae87-bb9eea47dba3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.513288] env[62684]: DEBUG nova.scheduler.client.report [None req-0bff8640-da66-4a02-8617-c9e18d8c1b5c tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2212.587491] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053576, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2212.643250] env[62684]: DEBUG oslo_vmware.api [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053577, 'name': PowerOffVM_Task, 'duration_secs': 0.210698} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2212.644195] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2212.644195] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Updating instance '264c6900-dbef-455e-95cc-1df73c735cc8' progress to 17 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2212.698181] env[62684]: DEBUG oslo_vmware.api [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053568, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2212.810339] env[62684]: DEBUG oslo_vmware.api [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Task: {'id': task-2053578, 'name': PowerOffVM_Task, 'duration_secs': 0.179334} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2212.810617] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2212.810791] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2212.811063] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-59afd11d-1a5f-46de-b5eb-4930a44b4416 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.020019] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0bff8640-da66-4a02-8617-c9e18d8c1b5c tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.808s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2213.050705] env[62684]: INFO nova.scheduler.client.report [None req-0bff8640-da66-4a02-8617-c9e18d8c1b5c tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Deleted allocations for instance 99a9653c-7221-4495-be5f-5441dc8da0f4 [ 2213.096534] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 
tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053576, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2213.150561] env[62684]: DEBUG nova.virt.hardware [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2213.150820] env[62684]: DEBUG nova.virt.hardware [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2213.150978] env[62684]: DEBUG nova.virt.hardware [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2213.151184] env[62684]: DEBUG nova.virt.hardware [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2213.151422] env[62684]: DEBUG nova.virt.hardware [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2213.151578] env[62684]: DEBUG nova.virt.hardware [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2213.151780] env[62684]: DEBUG nova.virt.hardware [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2213.151937] env[62684]: DEBUG nova.virt.hardware [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2213.152118] env[62684]: DEBUG 
nova.virt.hardware [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2213.152288] env[62684]: DEBUG nova.virt.hardware [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2213.152511] env[62684]: DEBUG nova.virt.hardware [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2213.158326] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a853192-04a8-4e5a-ad07-fd935d07a253 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.179399] env[62684]: DEBUG oslo_vmware.api [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2213.179399] env[62684]: value = "task-2053581" [ 2213.179399] env[62684]: _type = "Task" [ 2213.179399] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2213.187922] env[62684]: DEBUG oslo_vmware.api [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053581, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2213.196669] env[62684]: DEBUG oslo_vmware.api [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053568, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2213.559908] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0bff8640-da66-4a02-8617-c9e18d8c1b5c tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "99a9653c-7221-4495-be5f-5441dc8da0f4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.084s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2213.588786] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053576, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2213.690142] env[62684]: DEBUG oslo_vmware.api [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053581, 'name': ReconfigVM_Task, 'duration_secs': 0.246151} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2213.693326] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Updating instance '264c6900-dbef-455e-95cc-1df73c735cc8' progress to 33 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2213.701533] env[62684]: DEBUG oslo_vmware.api [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053568, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2214.090165] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053576, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2214.207597] env[62684]: DEBUG nova.virt.hardware [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2214.207885] env[62684]: DEBUG nova.virt.hardware [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2214.208126] env[62684]: DEBUG nova.virt.hardware [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2214.208371] env[62684]: DEBUG nova.virt.hardware [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2214.210592] env[62684]: DEBUG nova.virt.hardware 
[None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2214.210592] env[62684]: DEBUG nova.virt.hardware [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2214.210592] env[62684]: DEBUG nova.virt.hardware [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2214.210592] env[62684]: DEBUG nova.virt.hardware [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2214.210592] env[62684]: DEBUG nova.virt.hardware [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2214.210592] env[62684]: DEBUG nova.virt.hardware [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2214.210592] env[62684]: DEBUG nova.virt.hardware [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2214.215329] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Reconfiguring VM instance instance-00000061 to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2214.215699] env[62684]: DEBUG oslo_vmware.api [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053568, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2214.216236] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-191793b3-13c7-4b57-8385-7710a13e1e08 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.238043] env[62684]: DEBUG oslo_vmware.api [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2214.238043] env[62684]: value = "task-2053583" [ 2214.238043] env[62684]: _type = "Task" [ 2214.238043] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2214.248048] env[62684]: DEBUG oslo_vmware.api [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053583, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2214.590643] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053576, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2214.704990] env[62684]: DEBUG oslo_vmware.api [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053568, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2214.747675] env[62684]: DEBUG oslo_vmware.api [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053583, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2215.094025] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053576, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2215.205636] env[62684]: DEBUG oslo_vmware.api [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053568, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2215.249570] env[62684]: DEBUG oslo_vmware.api [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053583, 'name': ReconfigVM_Task, 'duration_secs': 0.842146} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2215.249907] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Reconfigured VM instance instance-00000061 to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2215.250766] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4553c1a8-9d1b-4d31-b5b4-7c29b88f207d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.273917] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 264c6900-dbef-455e-95cc-1df73c735cc8/264c6900-dbef-455e-95cc-1df73c735cc8.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2215.274224] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb1738e1-81f7-4f4c-a839-7b2befd1473c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.292634] env[62684]: DEBUG oslo_vmware.api [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2215.292634] env[62684]: value = "task-2053584" [ 2215.292634] env[62684]: _type = "Task" [ 2215.292634] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2215.302937] env[62684]: DEBUG oslo_vmware.api [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053584, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2215.593818] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053576, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2215.704807] env[62684]: DEBUG oslo_vmware.api [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053568, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2215.805115] env[62684]: DEBUG oslo_vmware.api [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053584, 'name': ReconfigVM_Task, 'duration_secs': 0.262864} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2215.805560] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 264c6900-dbef-455e-95cc-1df73c735cc8/264c6900-dbef-455e-95cc-1df73c735cc8.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2215.807500] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Updating instance '264c6900-dbef-455e-95cc-1df73c735cc8' progress to 50 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2216.094464] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053576, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2216.095667] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec4536ad-6988-4d77-ae22-806ca624acc6 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Volume attach. Driver type: vmdk {{(pid=62684) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2216.095948] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec4536ad-6988-4d77-ae22-806ca624acc6 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421388', 'volume_id': '93dd9c99-f5f4-43f7-8461-d3e47a775078', 'name': 'volume-93dd9c99-f5f4-43f7-8461-d3e47a775078', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '584845d2-d146-42bf-8ef5-58532fe24f65', 'attached_at': '', 'detached_at': '', 'volume_id': '93dd9c99-f5f4-43f7-8461-d3e47a775078', 'serial': '93dd9c99-f5f4-43f7-8461-d3e47a775078'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2216.096790] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1da54b63-1ce4-4122-9770-da95f8efde50 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.112785] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f71b0041-800d-4239-9fe0-e7f3b3437bf9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.142024] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec4536ad-6988-4d77-ae22-806ca624acc6 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 
volume-93dd9c99-f5f4-43f7-8461-d3e47a775078/volume-93dd9c99-f5f4-43f7-8461-d3e47a775078.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2216.142024] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90192b1c-06d7-4907-98a9-f033f6e0a5e2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.167432] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "587edf89-2ea0-4b89-8830-fa766b798398" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2216.167795] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "587edf89-2ea0-4b89-8830-fa766b798398" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2216.169174] env[62684]: DEBUG oslo_vmware.api [None req-ec4536ad-6988-4d77-ae22-806ca624acc6 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 2216.169174] env[62684]: value = "task-2053585" [ 2216.169174] env[62684]: _type = "Task" [ 2216.169174] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2216.179744] env[62684]: DEBUG oslo_vmware.api [None req-ec4536ad-6988-4d77-ae22-806ca624acc6 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053585, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2216.209298] env[62684]: DEBUG oslo_vmware.api [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053568, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2216.314678] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc82091a-eb5c-4bf6-b28b-dcee15c8446d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.335086] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66341838-01af-4464-ae75-76d8459d5349 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.352398] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Updating instance '264c6900-dbef-455e-95cc-1df73c735cc8' progress to 67 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2216.594164] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053576, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2216.670459] env[62684]: DEBUG nova.compute.manager [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2216.683923] env[62684]: DEBUG oslo_vmware.api [None req-ec4536ad-6988-4d77-ae22-806ca624acc6 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053585, 'name': ReconfigVM_Task, 'duration_secs': 0.348266} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2216.684272] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec4536ad-6988-4d77-ae22-806ca624acc6 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Reconfigured VM instance instance-0000005e to attach disk [datastore1] volume-93dd9c99-f5f4-43f7-8461-d3e47a775078/volume-93dd9c99-f5f4-43f7-8461-d3e47a775078.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2216.689095] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f652da38-136c-4308-b29d-db87596d8e05 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.708295] env[62684]: DEBUG oslo_vmware.api [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053568, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2216.709922] env[62684]: DEBUG oslo_vmware.api [None req-ec4536ad-6988-4d77-ae22-806ca624acc6 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 2216.709922] env[62684]: value = "task-2053586" [ 2216.709922] env[62684]: _type = "Task" [ 2216.709922] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2216.720705] env[62684]: DEBUG oslo_vmware.api [None req-ec4536ad-6988-4d77-ae22-806ca624acc6 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053586, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2216.896743] env[62684]: DEBUG nova.network.neutron [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Port 34ae3a7e-83a9-4ebd-8582-bb73f3050948 binding to destination host cpu-1 is already ACTIVE {{(pid=62684) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2217.094827] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053576, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2217.196501] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2217.196824] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2217.198871] env[62684]: INFO nova.compute.claims [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2217.210931] env[62684]: DEBUG oslo_vmware.api [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053568, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2217.218650] env[62684]: DEBUG oslo_vmware.api [None req-ec4536ad-6988-4d77-ae22-806ca624acc6 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053586, 'name': ReconfigVM_Task, 'duration_secs': 0.133753} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2217.218996] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec4536ad-6988-4d77-ae22-806ca624acc6 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421388', 'volume_id': '93dd9c99-f5f4-43f7-8461-d3e47a775078', 'name': 'volume-93dd9c99-f5f4-43f7-8461-d3e47a775078', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '584845d2-d146-42bf-8ef5-58532fe24f65', 'attached_at': '', 'detached_at': '', 'volume_id': '93dd9c99-f5f4-43f7-8461-d3e47a775078', 'serial': '93dd9c99-f5f4-43f7-8461-d3e47a775078'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2217.351818] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquiring lock "9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2217.352064] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lock "9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2217.563485] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2217.563829] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2217.563913] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Deleting the datastore file [datastore2] 3ff55331-6d5c-4558-b932-e266670f2ac9 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2217.564244] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3badb9ae-2321-4a8c-a092-0bff20a3c31b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.570671] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] 
Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2217.570962] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2217.571252] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Deleting the datastore file [datastore2] 58e67d8e-900e-4d22-a4fd-fe493758d4f2 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2217.571605] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e47e1b6d-c146-4620-bea8-c17aaf6deec1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.574616] env[62684]: DEBUG oslo_vmware.api [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Waiting for the task: (returnval){ [ 2217.574616] env[62684]: value = "task-2053587" [ 2217.574616] env[62684]: _type = "Task" [ 2217.574616] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2217.580730] env[62684]: DEBUG oslo_vmware.api [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Waiting for the task: (returnval){ [ 2217.580730] env[62684]: value = "task-2053588" [ 2217.580730] env[62684]: _type = "Task" [ 2217.580730] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2217.587548] env[62684]: DEBUG oslo_vmware.api [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Task: {'id': task-2053587, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2217.595070] env[62684]: DEBUG oslo_vmware.api [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Task: {'id': task-2053588, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2217.601054] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053576, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2217.719975] env[62684]: DEBUG oslo_vmware.api [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053568, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2217.854626] env[62684]: DEBUG nova.compute.manager [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2217.927458] env[62684]: DEBUG oslo_concurrency.lockutils [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "264c6900-dbef-455e-95cc-1df73c735cc8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2217.927458] env[62684]: DEBUG oslo_concurrency.lockutils [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "264c6900-dbef-455e-95cc-1df73c735cc8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2217.927458] env[62684]: DEBUG oslo_concurrency.lockutils [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "264c6900-dbef-455e-95cc-1df73c735cc8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2218.084493] env[62684]: DEBUG oslo_vmware.api [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Task: {'id': task-2053587, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160768} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2218.087419] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2218.087612] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2218.087828] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2218.088008] env[62684]: INFO nova.compute.manager [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Took 6.28 seconds to destroy the instance on the hypervisor. [ 2218.088253] env[62684]: DEBUG oslo.service.loopingcall [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2218.088515] env[62684]: DEBUG nova.compute.manager [-] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2218.088614] env[62684]: DEBUG nova.network.neutron [-] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2218.096983] env[62684]: DEBUG oslo_vmware.api [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Task: {'id': task-2053588, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150823} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2218.097643] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2218.097882] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2218.098122] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2218.098320] env[62684]: INFO nova.compute.manager [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Took 5.81 seconds to destroy the instance on the hypervisor. [ 2218.098561] env[62684]: DEBUG oslo.service.loopingcall [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2218.101983] env[62684]: DEBUG nova.compute.manager [-] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2218.102092] env[62684]: DEBUG nova.network.neutron [-] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2218.103668] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053576, 'name': CloneVM_Task} progress is 95%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2218.218303] env[62684]: DEBUG oslo_vmware.api [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053568, 'name': ReconfigVM_Task, 'duration_secs': 9.084867} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2218.222049] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2218.222049] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Reconfigured VM to detach interface {{(pid=62684) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 2218.262049] env[62684]: DEBUG nova.objects.instance [None req-ec4536ad-6988-4d77-ae22-806ca624acc6 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lazy-loading 'flavor' on Instance uuid 584845d2-d146-42bf-8ef5-58532fe24f65 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2218.384346] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2218.423754] env[62684]: DEBUG nova.compute.manager [req-7d300b42-92e1-4feb-828a-ccee045b2032 req-7c924ea2-f661-461b-aec2-d5a8ae77ec00 service nova] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Received event network-vif-deleted-e9e4e88f-b0c7-406a-b45c-3fd6cfcb854b {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2218.424072] env[62684]: INFO nova.compute.manager [req-7d300b42-92e1-4feb-828a-ccee045b2032 req-7c924ea2-f661-461b-aec2-d5a8ae77ec00 service nova] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Neutron deleted interface e9e4e88f-b0c7-406a-b45c-3fd6cfcb854b; detaching it from the instance and deleting it from the info cache [ 2218.424501] env[62684]: DEBUG nova.network.neutron [req-7d300b42-92e1-4feb-828a-ccee045b2032 req-7c924ea2-f661-461b-aec2-d5a8ae77ec00 service nova] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2218.516030] env[62684]: DEBUG oslo_concurrency.lockutils [None req-79dce794-2b9e-4ff0-b8bd-5994dc121753 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "2baabe7a-ed33-4cef-9acc-a7b804610b0a" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2218.516030] env[62684]: DEBUG oslo_concurrency.lockutils [None req-79dce794-2b9e-4ff0-b8bd-5994dc121753 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "2baabe7a-ed33-4cef-9acc-a7b804610b0a" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2218.523579] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0262f089-b232-4a1a-8b66-389c65e2f6f7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.533699] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edc0c136-c58c-4c19-b3e7-bf4769243ad1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.572106] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-637ebb93-5e53-415d-b9e0-70b71271cb6a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.577193] env[62684]: DEBUG nova.compute.manager [req-c720e7c8-e765-45e4-8bfa-c248b0e32c37 req-248ac7d1-4827-47c2-bba8-36aee15aa5a6 service nova] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Received event network-vif-deleted-d95acbd4-4387-4053-9921-da1c96923d5c {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2218.577450] env[62684]: INFO nova.compute.manager [req-c720e7c8-e765-45e4-8bfa-c248b0e32c37 req-248ac7d1-4827-47c2-bba8-36aee15aa5a6 service nova] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Neutron deleted interface d95acbd4-4387-4053-9921-da1c96923d5c; detaching it from the instance and deleting it from the info cache [ 2218.577666] env[62684]: DEBUG nova.network.neutron [req-c720e7c8-e765-45e4-8bfa-c248b0e32c37 req-248ac7d1-4827-47c2-bba8-36aee15aa5a6 service nova] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2218.585271] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15c2f229-49ee-4c61-830a-af0ea51930ec {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.608138] env[62684]: DEBUG nova.compute.provider_tree [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2218.609477] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053576, 'name': CloneVM_Task, 'duration_secs': 6.170402} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2218.609722] env[62684]: INFO nova.virt.vmwareapi.vmops [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Created linked-clone VM from snapshot [ 2218.613506] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b80a16c-43bd-4bae-a082-6e6c8297bd1d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.618488] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Uploading image 4cdf859f-d519-41eb-8ee6-7f85e8faee25 {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2218.646872] env[62684]: DEBUG oslo_vmware.rw_handles [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2218.646872] env[62684]: value = "vm-421389" [ 2218.646872] env[62684]: _type = "VirtualMachine" [ 2218.646872] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2218.647475] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-5b4b2fc8-b1a9-4581-988a-1884bd8a6a75 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.658161] env[62684]: DEBUG oslo_vmware.rw_handles [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lease: (returnval){ [ 2218.658161] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5215a95c-addb-9eb1-918b-d5f5cbebd344" [ 2218.658161] env[62684]: _type = "HttpNfcLease" [ 2218.658161] env[62684]: } obtained for exporting VM: (result){ [ 2218.658161] env[62684]: value = "vm-421389" [ 2218.658161] env[62684]: _type = "VirtualMachine" [ 2218.658161] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2218.658408] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the lease: (returnval){ [ 2218.658408] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5215a95c-addb-9eb1-918b-d5f5cbebd344" [ 2218.658408] env[62684]: _type = "HttpNfcLease" [ 2218.658408] env[62684]: } to be ready. {{(pid=62684) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2218.665164] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2218.665164] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5215a95c-addb-9eb1-918b-d5f5cbebd344" [ 2218.665164] env[62684]: _type = "HttpNfcLease" [ 2218.665164] env[62684]: } is initializing. 
{{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2218.773031] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ec4536ad-6988-4d77-ae22-806ca624acc6 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "584845d2-d146-42bf-8ef5-58532fe24f65" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.298s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2218.900714] env[62684]: DEBUG nova.network.neutron [-] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2218.930157] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cbd8b3cc-0050-488d-83ff-dc464cbbeee9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.941672] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d08fdba1-0488-496c-a944-b0e285c3a030 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.974826] env[62684]: DEBUG nova.compute.manager [req-7d300b42-92e1-4feb-828a-ccee045b2032 req-7c924ea2-f661-461b-aec2-d5a8ae77ec00 service nova] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Detach interface failed, port_id=e9e4e88f-b0c7-406a-b45c-3fd6cfcb854b, reason: Instance 58e67d8e-900e-4d22-a4fd-fe493758d4f2 could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2218.975941] env[62684]: DEBUG oslo_concurrency.lockutils [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "refresh_cache-264c6900-dbef-455e-95cc-1df73c735cc8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2218.976136] env[62684]: DEBUG oslo_concurrency.lockutils [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired lock "refresh_cache-264c6900-dbef-455e-95cc-1df73c735cc8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2218.976318] env[62684]: DEBUG nova.network.neutron [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2219.018601] env[62684]: INFO nova.compute.manager [None req-79dce794-2b9e-4ff0-b8bd-5994dc121753 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Detaching volume 004e22f3-56ef-4e39-994c-bd13ad8bbc11 [ 2219.050043] env[62684]: INFO nova.virt.block_device [None req-79dce794-2b9e-4ff0-b8bd-5994dc121753 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Attempting to driver detach volume 
004e22f3-56ef-4e39-994c-bd13ad8bbc11 from mountpoint /dev/sdb [ 2219.050043] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-79dce794-2b9e-4ff0-b8bd-5994dc121753 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Volume detach. Driver type: vmdk {{(pid=62684) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2219.050043] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-79dce794-2b9e-4ff0-b8bd-5994dc121753 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421371', 'volume_id': '004e22f3-56ef-4e39-994c-bd13ad8bbc11', 'name': 'volume-004e22f3-56ef-4e39-994c-bd13ad8bbc11', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2baabe7a-ed33-4cef-9acc-a7b804610b0a', 'attached_at': '', 'detached_at': '', 'volume_id': '004e22f3-56ef-4e39-994c-bd13ad8bbc11', 'serial': '004e22f3-56ef-4e39-994c-bd13ad8bbc11'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2219.050722] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cdda5c7-34b5-4987-aedd-6e15e18450f7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.072019] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a594a52-7055-4b0b-80bf-2f639bd1abce {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.078747] env[62684]: DEBUG nova.network.neutron [-] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2219.080706] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ebe6957-b9a8-4e07-91c7-75a496098b4d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.083276] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b53cd7a9-6cb7-403d-bb38-54a1a5cfaf70 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.104010] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ba4033-f893-4f83-9af3-f1ba3019b08b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.109422] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-239fed18-9bb8-47ea-8379-ede187a99b0d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.121156] env[62684]: DEBUG nova.scheduler.client.report [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2219.136984] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-79dce794-2b9e-4ff0-b8bd-5994dc121753 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] The volume has not been displaced from its original location: [datastore2] volume-004e22f3-56ef-4e39-994c-bd13ad8bbc11/volume-004e22f3-56ef-4e39-994c-bd13ad8bbc11.vmdk. No consolidation needed. {{(pid=62684) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2219.142103] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-79dce794-2b9e-4ff0-b8bd-5994dc121753 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Reconfiguring VM instance instance-0000003f to detach disk 2001 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2219.143025] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.946s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2219.143539] env[62684]: DEBUG nova.compute.manager [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2219.150024] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f817e717-fa72-43db-aa5a-de0310421e0c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.159608] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.775s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2219.161681] env[62684]: INFO nova.compute.claims [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2219.183390] env[62684]: DEBUG nova.compute.manager [req-c720e7c8-e765-45e4-8bfa-c248b0e32c37 req-248ac7d1-4827-47c2-bba8-36aee15aa5a6 service nova] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Detach interface failed, port_id=d95acbd4-4387-4053-9921-da1c96923d5c, reason: Instance 3ff55331-6d5c-4558-b932-e266670f2ac9 could not be found. 
{{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2219.193485] env[62684]: DEBUG oslo_vmware.api [None req-79dce794-2b9e-4ff0-b8bd-5994dc121753 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 2219.193485] env[62684]: value = "task-2053590" [ 2219.193485] env[62684]: _type = "Task" [ 2219.193485] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2219.195762] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2219.195762] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5215a95c-addb-9eb1-918b-d5f5cbebd344" [ 2219.195762] env[62684]: _type = "HttpNfcLease" [ 2219.195762] env[62684]: } is ready. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2219.198658] env[62684]: DEBUG oslo_vmware.rw_handles [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2219.198658] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5215a95c-addb-9eb1-918b-d5f5cbebd344" [ 2219.198658] env[62684]: _type = "HttpNfcLease" [ 2219.198658] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2219.200048] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-312569f4-0c81-4f9c-b52c-26b46ea16753 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.208160] env[62684]: DEBUG oslo_vmware.api [None req-79dce794-2b9e-4ff0-b8bd-5994dc121753 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053590, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2219.211420] env[62684]: DEBUG oslo_vmware.rw_handles [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528613b4-50b6-4826-5f8d-1b167659d156/disk-0.vmdk from lease info. {{(pid=62684) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2219.211604] env[62684]: DEBUG oslo_vmware.rw_handles [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528613b4-50b6-4826-5f8d-1b167659d156/disk-0.vmdk for reading. {{(pid=62684) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2219.301714] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-02286aad-7ab6-4a5b-8d54-44a89097f4e6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.403374] env[62684]: INFO nova.compute.manager [-] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Took 1.30 seconds to deallocate network for instance. 
[ 2219.584567] env[62684]: INFO nova.compute.manager [-] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Took 1.50 seconds to deallocate network for instance. [ 2219.666732] env[62684]: DEBUG nova.compute.utils [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2219.668286] env[62684]: DEBUG nova.compute.manager [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2219.668659] env[62684]: DEBUG nova.network.neutron [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2219.688101] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "d7f09d0e-f7b6-415e-8d82-47eba1153aa1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2219.688453] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "d7f09d0e-f7b6-415e-8d82-47eba1153aa1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2219.688781] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "d7f09d0e-f7b6-415e-8d82-47eba1153aa1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2219.689225] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "d7f09d0e-f7b6-415e-8d82-47eba1153aa1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2219.689444] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "d7f09d0e-f7b6-415e-8d82-47eba1153aa1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2219.691611] env[62684]: 
DEBUG oslo_concurrency.lockutils [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "refresh_cache-d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2219.691837] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquired lock "refresh_cache-d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2219.692080] env[62684]: DEBUG nova.network.neutron [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2219.694099] env[62684]: INFO nova.compute.manager [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Terminating instance [ 2219.695435] env[62684]: DEBUG nova.compute.manager [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2219.695640] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2219.696741] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c5f311a-a609-4254-bfe6-7f38962f9888 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.711647] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2219.714977] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d83fb6ef-67db-403d-a672-81cc3ffdba40 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.717182] env[62684]: DEBUG oslo_vmware.api [None req-79dce794-2b9e-4ff0-b8bd-5994dc121753 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053590, 'name': ReconfigVM_Task, 'duration_secs': 0.208316} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2219.717903] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-79dce794-2b9e-4ff0-b8bd-5994dc121753 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Reconfigured VM instance instance-0000003f to detach disk 2001 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2219.722857] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8a65748-f1e6-49a7-9389-8f29ef91a97a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.736689] env[62684]: DEBUG nova.network.neutron [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Updating instance_info_cache with network_info: [{"id": "34ae3a7e-83a9-4ebd-8582-bb73f3050948", "address": "fa:16:3e:fc:ef:f4", "network": {"id": "7678b347-6a54-4b84-9a4d-b566bbeb1ea4", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-51664912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d88ac878d44480b3b54b24ab87efa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34ae3a7e-83", "ovs_interfaceid": "34ae3a7e-83a9-4ebd-8582-bb73f3050948", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2219.740768] env[62684]: DEBUG oslo_vmware.api [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2219.740768] env[62684]: value = "task-2053591" [ 2219.740768] env[62684]: _type = "Task" [ 2219.740768] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2219.744807] env[62684]: DEBUG nova.policy [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2fab3230b61d440e93d1d0a975115405', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '27d04006afc747e19ad87238bfdbaad1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2219.754838] env[62684]: DEBUG oslo_vmware.api [None req-79dce794-2b9e-4ff0-b8bd-5994dc121753 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 2219.754838] env[62684]: value = "task-2053592" [ 2219.754838] env[62684]: _type = "Task" [ 2219.754838] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2219.763639] env[62684]: DEBUG oslo_vmware.api [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053591, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2219.767990] env[62684]: DEBUG oslo_concurrency.lockutils [None req-92e5635c-14ec-4f07-aaff-34ef8ef17fb4 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "584845d2-d146-42bf-8ef5-58532fe24f65" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2219.769024] env[62684]: DEBUG oslo_concurrency.lockutils [None req-92e5635c-14ec-4f07-aaff-34ef8ef17fb4 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "584845d2-d146-42bf-8ef5-58532fe24f65" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2219.774319] env[62684]: DEBUG oslo_vmware.api [None req-79dce794-2b9e-4ff0-b8bd-5994dc121753 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053592, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2219.909461] env[62684]: DEBUG oslo_concurrency.lockutils [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2220.052494] env[62684]: DEBUG nova.network.neutron [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Successfully created port: 91d5100c-0d94-42a3-a4f2-5055bd108b50 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2220.091581] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2220.172148] env[62684]: DEBUG nova.compute.manager [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2220.245180] env[62684]: DEBUG oslo_concurrency.lockutils [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Releasing lock "refresh_cache-264c6900-dbef-455e-95cc-1df73c735cc8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2220.271141] env[62684]: DEBUG oslo_vmware.api [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053591, 'name': PowerOffVM_Task, 'duration_secs': 0.165772} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2220.272209] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2220.273075] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2220.277017] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4f95d180-6ed7-4b5e-9c9c-81b105c9bf6b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.279514] env[62684]: DEBUG nova.compute.utils [None req-92e5635c-14ec-4f07-aaff-34ef8ef17fb4 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2220.285153] env[62684]: DEBUG oslo_vmware.api [None req-79dce794-2b9e-4ff0-b8bd-5994dc121753 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053592, 'name': ReconfigVM_Task, 'duration_secs': 0.167558} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2220.285153] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-79dce794-2b9e-4ff0-b8bd-5994dc121753 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421371', 'volume_id': '004e22f3-56ef-4e39-994c-bd13ad8bbc11', 'name': 'volume-004e22f3-56ef-4e39-994c-bd13ad8bbc11', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2baabe7a-ed33-4cef-9acc-a7b804610b0a', 'attached_at': '', 'detached_at': '', 'volume_id': '004e22f3-56ef-4e39-994c-bd13ad8bbc11', 'serial': '004e22f3-56ef-4e39-994c-bd13ad8bbc11'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2220.408675] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2220.408949] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2220.409234] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Deleting the datastore file [datastore2] d7f09d0e-f7b6-415e-8d82-47eba1153aa1 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2220.409608] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-62f59026-8a9a-419e-97a9-b21b175752d9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.416659] env[62684]: DEBUG oslo_vmware.api [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2220.416659] env[62684]: value = "task-2053594" [ 2220.416659] env[62684]: _type = "Task" [ 2220.416659] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2220.428667] env[62684]: DEBUG oslo_vmware.api [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053594, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2220.471070] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b17b2bb-8e0a-496a-b312-d8709decb9cd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.479084] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eeaf8d2-9f95-4e37-80cf-e22ec889fcd2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.518045] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be61c945-9e9d-49e1-97af-a264a82fb796 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.526486] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa28cb4-7f41-4769-b4d4-a21c9e5a07d7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.541015] env[62684]: DEBUG nova.compute.provider_tree [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2220.556421] env[62684]: INFO nova.network.neutron [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Port e60b8515-c469-46d3-945b-bc843ccffc44 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 2220.556834] env[62684]: DEBUG nova.network.neutron [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Updating instance_info_cache with network_info: [{"id": "efda8f4d-97b4-44f8-b30b-d26145e98e58", "address": "fa:16:3e:be:97:33", "network": {"id": "bbb78a3c-6804-4aae-9107-4ae6699c305d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2029963636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5cb4900a999e467bafdfd1fb407a82f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefda8f4d-97", "ovs_interfaceid": "efda8f4d-97b4-44f8-b30b-d26145e98e58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2220.773042] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5bf70cd-941e-4c71-b416-ec88770994af {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.793245] env[62684]: DEBUG oslo_concurrency.lockutils [None req-92e5635c-14ec-4f07-aaff-34ef8ef17fb4 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "584845d2-d146-42bf-8ef5-58532fe24f65" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.025s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2220.794658] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-301d410c-7c72-4290-b69e-32d5a8fa5005 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.803318] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Updating instance '264c6900-dbef-455e-95cc-1df73c735cc8' progress to 83 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2220.836207] env[62684]: DEBUG nova.objects.instance [None req-79dce794-2b9e-4ff0-b8bd-5994dc121753 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lazy-loading 'flavor' on Instance uuid 2baabe7a-ed33-4cef-9acc-a7b804610b0a {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2220.930230] env[62684]: DEBUG oslo_vmware.api [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 
tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053594, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.190049} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2220.930558] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2220.930815] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2220.931414] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2220.931414] env[62684]: INFO nova.compute.manager [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Took 1.24 seconds to destroy the instance on the hypervisor. [ 2220.931612] env[62684]: DEBUG oslo.service.loopingcall [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2220.931879] env[62684]: DEBUG nova.compute.manager [-] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2220.931986] env[62684]: DEBUG nova.network.neutron [-] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2221.045130] env[62684]: DEBUG nova.scheduler.client.report [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2221.059680] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Releasing lock "refresh_cache-d7f09d0e-f7b6-415e-8d82-47eba1153aa1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2221.184299] env[62684]: DEBUG nova.compute.manager [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2221.211159] env[62684]: DEBUG nova.virt.hardware [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2221.211422] env[62684]: DEBUG nova.virt.hardware [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2221.211589] env[62684]: DEBUG nova.virt.hardware [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2221.211785] env[62684]: DEBUG nova.virt.hardware [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2221.211934] env[62684]: DEBUG nova.virt.hardware [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2221.212098] env[62684]: DEBUG nova.virt.hardware [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2221.212315] env[62684]: DEBUG nova.virt.hardware [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2221.212483] env[62684]: DEBUG nova.virt.hardware [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2221.212656] env[62684]: DEBUG nova.virt.hardware [None 
req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2221.212825] env[62684]: DEBUG nova.virt.hardware [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2221.213012] env[62684]: DEBUG nova.virt.hardware [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2221.213909] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f3317e8-9296-4436-81a8-742653fd0986 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.224623] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b737bf-d675-43a0-bf23-2e86fc076ca4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.309342] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2221.309706] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5293ef02-bf46-44c6-8d2c-2ef487b4e4ae {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.317846] env[62684]: DEBUG oslo_vmware.api [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2221.317846] env[62684]: value = "task-2053595" [ 2221.317846] env[62684]: _type = "Task" [ 2221.317846] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2221.326418] env[62684]: DEBUG oslo_vmware.api [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053595, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2221.551165] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.390s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2221.551165] env[62684]: DEBUG nova.compute.manager [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2221.553881] env[62684]: DEBUG oslo_concurrency.lockutils [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.645s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2221.554134] env[62684]: DEBUG nova.objects.instance [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Lazy-loading 'resources' on Instance uuid 58e67d8e-900e-4d22-a4fd-fe493758d4f2 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2221.563644] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dc53c289-04dc-487f-8818-dc3eab72d10e tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "interface-d7f09d0e-f7b6-415e-8d82-47eba1153aa1-e60b8515-c469-46d3-945b-bc843ccffc44" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 13.459s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2221.709457] env[62684]: DEBUG nova.compute.manager [req-a96f926d-5704-45a1-b4e9-47dacac07f20 req-44c4f611-7c59-4baf-a2f2-c47d238608a4 service nova] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Received event network-vif-plugged-91d5100c-0d94-42a3-a4f2-5055bd108b50 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2221.709794] env[62684]: DEBUG oslo_concurrency.lockutils [req-a96f926d-5704-45a1-b4e9-47dacac07f20 req-44c4f611-7c59-4baf-a2f2-c47d238608a4 service nova] Acquiring lock "587edf89-2ea0-4b89-8830-fa766b798398-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2221.710549] env[62684]: DEBUG oslo_concurrency.lockutils [req-a96f926d-5704-45a1-b4e9-47dacac07f20 req-44c4f611-7c59-4baf-a2f2-c47d238608a4 service nova] Lock "587edf89-2ea0-4b89-8830-fa766b798398-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2221.710549] env[62684]: DEBUG oslo_concurrency.lockutils [req-a96f926d-5704-45a1-b4e9-47dacac07f20 
req-44c4f611-7c59-4baf-a2f2-c47d238608a4 service nova] Lock "587edf89-2ea0-4b89-8830-fa766b798398-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2221.710549] env[62684]: DEBUG nova.compute.manager [req-a96f926d-5704-45a1-b4e9-47dacac07f20 req-44c4f611-7c59-4baf-a2f2-c47d238608a4 service nova] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] No waiting events found dispatching network-vif-plugged-91d5100c-0d94-42a3-a4f2-5055bd108b50 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2221.710872] env[62684]: WARNING nova.compute.manager [req-a96f926d-5704-45a1-b4e9-47dacac07f20 req-44c4f611-7c59-4baf-a2f2-c47d238608a4 service nova] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Received unexpected event network-vif-plugged-91d5100c-0d94-42a3-a4f2-5055bd108b50 for instance with vm_state building and task_state spawning. [ 2221.830681] env[62684]: DEBUG oslo_vmware.api [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053595, 'name': PowerOnVM_Task, 'duration_secs': 0.425056} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2221.831448] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2221.832144] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-daac94c9-923d-4723-9f02-4b871b653997 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Updating instance '264c6900-dbef-455e-95cc-1df73c735cc8' progress to 100 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2221.838222] env[62684]: DEBUG oslo_concurrency.lockutils [None req-92e5635c-14ec-4f07-aaff-34ef8ef17fb4 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "584845d2-d146-42bf-8ef5-58532fe24f65" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2221.838590] env[62684]: DEBUG oslo_concurrency.lockutils [None req-92e5635c-14ec-4f07-aaff-34ef8ef17fb4 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "584845d2-d146-42bf-8ef5-58532fe24f65" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2221.839241] env[62684]: INFO nova.compute.manager [None req-92e5635c-14ec-4f07-aaff-34ef8ef17fb4 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Attaching volume cba4160c-2fba-4b43-b9ec-1bef4fb246a1 to /dev/sdc [ 2221.845203] env[62684]: DEBUG oslo_concurrency.lockutils [None req-79dce794-2b9e-4ff0-b8bd-5994dc121753 
tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "2baabe7a-ed33-4cef-9acc-a7b804610b0a" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.329s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2221.860634] env[62684]: DEBUG nova.network.neutron [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Successfully updated port: 91d5100c-0d94-42a3-a4f2-5055bd108b50 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2221.888704] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fef57bd-9427-4539-9837-d523e7ed7474 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.896482] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccb2b13c-5cc7-4f2f-b0d7-b892236adebe {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.910777] env[62684]: DEBUG nova.virt.block_device [None req-92e5635c-14ec-4f07-aaff-34ef8ef17fb4 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Updating existing volume attachment record: 25fa3b2b-11a7-4e5a-b0ca-b196761bbefd {{(pid=62684) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2222.056846] env[62684]: DEBUG nova.compute.utils [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2222.058474] env[62684]: DEBUG nova.compute.manager [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2222.058672] env[62684]: DEBUG nova.network.neutron [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2222.112825] env[62684]: DEBUG nova.policy [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f2cd375ad4a34027b34407d99993b084', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f0e0f0e1dc834134913bd742fa99b52f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2222.274109] env[62684]: DEBUG nova.network.neutron [-] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2222.325187] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-864a9610-d4a7-4798-ab26-477bada43fb3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.335624] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-508c3e18-68b3-476b-9f56-e48f38f3b591 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.369952] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "refresh_cache-587edf89-2ea0-4b89-8830-fa766b798398" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2222.369952] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquired lock "refresh_cache-587edf89-2ea0-4b89-8830-fa766b798398" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2222.369952] env[62684]: DEBUG nova.network.neutron [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2222.380021] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7c53b8f-6908-4809-90e0-90de88c90d26 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.385463] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b0b4d55-95c0-4473-b316-8f59d5ad3f56 
{{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.402683] env[62684]: DEBUG nova.compute.provider_tree [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2222.422847] env[62684]: DEBUG nova.network.neutron [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Successfully created port: 61adf5cc-1692-4079-b909-b15313ce9680 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2222.565020] env[62684]: DEBUG nova.compute.manager [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2222.779537] env[62684]: INFO nova.compute.manager [-] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Took 1.85 seconds to deallocate network for instance. [ 2222.908754] env[62684]: DEBUG nova.network.neutron [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2222.923232] env[62684]: ERROR nova.scheduler.client.report [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] [req-3e101f85-e98f-47dc-aa8c-3a8b1d4fb839] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c23c281e-ec1f-4876-972e-a98655f2084f. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3e101f85-e98f-47dc-aa8c-3a8b1d4fb839"}]} [ 2222.934523] env[62684]: DEBUG oslo_concurrency.lockutils [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "2baabe7a-ed33-4cef-9acc-a7b804610b0a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2222.934819] env[62684]: DEBUG oslo_concurrency.lockutils [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "2baabe7a-ed33-4cef-9acc-a7b804610b0a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2222.935505] env[62684]: DEBUG oslo_concurrency.lockutils [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "2baabe7a-ed33-4cef-9acc-a7b804610b0a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2222.935505] env[62684]: DEBUG oslo_concurrency.lockutils [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "2baabe7a-ed33-4cef-9acc-a7b804610b0a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2222.935505] env[62684]: DEBUG oslo_concurrency.lockutils [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "2baabe7a-ed33-4cef-9acc-a7b804610b0a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2222.938708] env[62684]: INFO nova.compute.manager [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Terminating instance [ 2222.940640] env[62684]: DEBUG nova.compute.manager [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2222.940825] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2222.941984] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-202a35f7-ab8a-458d-8248-1c7d7ba5cf44 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.945669] env[62684]: DEBUG nova.scheduler.client.report [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2222.952660] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2222.953357] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca10a0cd-8f50-4e40-8724-67daf2e67800 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.960504] env[62684]: DEBUG oslo_vmware.api [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 2222.960504] env[62684]: value = "task-2053597" [ 2222.960504] env[62684]: _type = "Task" [ 2222.960504] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2222.964737] env[62684]: DEBUG nova.scheduler.client.report [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2222.965938] env[62684]: DEBUG nova.compute.provider_tree [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2222.974550] env[62684]: DEBUG oslo_vmware.api [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053597, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2222.981587] env[62684]: DEBUG nova.scheduler.client.report [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2223.001918] env[62684]: DEBUG nova.scheduler.client.report [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2223.059644] env[62684]: DEBUG nova.network.neutron [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Updating instance_info_cache with network_info: [{"id": "91d5100c-0d94-42a3-a4f2-5055bd108b50", "address": "fa:16:3e:58:06:3e", "network": {"id": "e177c6d0-ddd5-4029-94af-c8f1b937dd9f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1344612161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27d04006afc747e19ad87238bfdbaad1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91d5100c-0d", "ovs_interfaceid": "91d5100c-0d94-42a3-a4f2-5055bd108b50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2223.249273] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d3e2cc-8305-4931-a073-45a0fd93d446 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.258541] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb13df2-30d7-4542-978e-b324ce45bc5d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.294468] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2223.295558] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21c6ebfd-bf46-4447-9cf1-3e79bb674c11 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.303748] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c23eaf-dbbd-487e-9394-6a4e33b0e72f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.321591] env[62684]: DEBUG nova.compute.provider_tree [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2223.472102] env[62684]: DEBUG oslo_vmware.api [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053597, 'name': PowerOffVM_Task, 'duration_secs': 0.226961} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2223.472542] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2223.472797] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2223.473159] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5bf6320b-4d2f-4189-a29c-55ca0422ed39 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.556161] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83de388b-5cbc-417e-bfb6-e7c4207a5031 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "264c6900-dbef-455e-95cc-1df73c735cc8" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2223.556494] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83de388b-5cbc-417e-bfb6-e7c4207a5031 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "264c6900-dbef-455e-95cc-1df73c735cc8" acquired by 
"nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2223.556701] env[62684]: DEBUG nova.compute.manager [None req-83de388b-5cbc-417e-bfb6-e7c4207a5031 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Going to confirm migration 4 {{(pid=62684) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 2223.561825] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Releasing lock "refresh_cache-587edf89-2ea0-4b89-8830-fa766b798398" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2223.562131] env[62684]: DEBUG nova.compute.manager [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Instance network_info: |[{"id": "91d5100c-0d94-42a3-a4f2-5055bd108b50", "address": "fa:16:3e:58:06:3e", "network": {"id": "e177c6d0-ddd5-4029-94af-c8f1b937dd9f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1344612161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27d04006afc747e19ad87238bfdbaad1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91d5100c-0d", "ovs_interfaceid": "91d5100c-0d94-42a3-a4f2-5055bd108b50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2223.562561] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:06:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '171aeae0-6a27-44fc-bc3d-a2d5581fc702', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '91d5100c-0d94-42a3-a4f2-5055bd108b50', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2223.570250] env[62684]: DEBUG oslo.service.loopingcall [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2223.570477] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2223.570713] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f102b1b2-cf9e-4012-9d6b-54e0420c1b1b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.587237] env[62684]: DEBUG nova.compute.manager [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2223.596982] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2223.597313] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2223.597572] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Deleting the datastore file [datastore1] 2baabe7a-ed33-4cef-9acc-a7b804610b0a {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2223.599679] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fb29939a-6f0d-43dd-b3ce-68210a861b3a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.601795] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2223.601795] env[62684]: value = "task-2053599" [ 2223.601795] env[62684]: _type = "Task" [ 2223.601795] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2223.611395] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053599, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2223.615706] env[62684]: DEBUG oslo_vmware.api [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 2223.615706] env[62684]: value = "task-2053600" [ 2223.615706] env[62684]: _type = "Task" [ 2223.615706] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2223.624787] env[62684]: DEBUG nova.virt.hardware [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2223.625079] env[62684]: DEBUG nova.virt.hardware [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2223.625303] env[62684]: DEBUG nova.virt.hardware [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2223.625560] env[62684]: DEBUG nova.virt.hardware [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2223.625755] env[62684]: DEBUG nova.virt.hardware [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2223.625950] env[62684]: DEBUG nova.virt.hardware [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2223.626235] env[62684]: DEBUG nova.virt.hardware [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2223.626518] env[62684]: DEBUG nova.virt.hardware [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2223.626802] 
env[62684]: DEBUG nova.virt.hardware [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2223.627062] env[62684]: DEBUG nova.virt.hardware [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2223.627315] env[62684]: DEBUG nova.virt.hardware [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2223.628711] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3e5a478-d309-492c-8ff2-539630621ba3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.636359] env[62684]: DEBUG oslo_vmware.api [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053600, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2223.643382] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9069ba35-730b-4470-a87e-3e2c746c391c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.747428] env[62684]: DEBUG nova.compute.manager [req-6575b301-980f-413e-a852-a930faa040cf req-ad8a632e-d11b-4d1f-acf0-c468310a6b8c service nova] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Received event network-vif-deleted-efda8f4d-97b4-44f8-b30b-d26145e98e58 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2223.747916] env[62684]: DEBUG nova.compute.manager [req-6575b301-980f-413e-a852-a930faa040cf req-ad8a632e-d11b-4d1f-acf0-c468310a6b8c service nova] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Received event network-changed-91d5100c-0d94-42a3-a4f2-5055bd108b50 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2223.747916] env[62684]: DEBUG nova.compute.manager [req-6575b301-980f-413e-a852-a930faa040cf req-ad8a632e-d11b-4d1f-acf0-c468310a6b8c service nova] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Refreshing instance network info cache due to event network-changed-91d5100c-0d94-42a3-a4f2-5055bd108b50. 
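
The nova.virt.hardware lines above enumerate every sockets/cores/threads split whose product equals the flavor's vCPU count (here m1.nano with a single vCPU, under effectively unconstrained limits of 65536 each) and then sort by preference; with one vCPU the only survivor is 1x1x1. A simplified sketch of that enumeration (not nova's _get_possible_cpu_topologies, and ignoring NUMA and preference ordering):

from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield every (sockets, cores, threads) combination whose product equals
    vcpus and that stays within the per-dimension limits logged above."""
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                yield VirtCPUTopology(sockets, cores, threads)

print(list(possible_topologies(1)))       # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
print(len(list(possible_topologies(4))))  # 6 ways to factor 4 vCPUs
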
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2223.748047] env[62684]: DEBUG oslo_concurrency.lockutils [req-6575b301-980f-413e-a852-a930faa040cf req-ad8a632e-d11b-4d1f-acf0-c468310a6b8c service nova] Acquiring lock "refresh_cache-587edf89-2ea0-4b89-8830-fa766b798398" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2223.748168] env[62684]: DEBUG oslo_concurrency.lockutils [req-6575b301-980f-413e-a852-a930faa040cf req-ad8a632e-d11b-4d1f-acf0-c468310a6b8c service nova] Acquired lock "refresh_cache-587edf89-2ea0-4b89-8830-fa766b798398" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2223.748341] env[62684]: DEBUG nova.network.neutron [req-6575b301-980f-413e-a852-a930faa040cf req-ad8a632e-d11b-4d1f-acf0-c468310a6b8c service nova] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Refreshing network info cache for port 91d5100c-0d94-42a3-a4f2-5055bd108b50 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2223.863801] env[62684]: DEBUG nova.scheduler.client.report [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Updated inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f with generation 156 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2223.864119] env[62684]: DEBUG nova.compute.provider_tree [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Updating resource provider c23c281e-ec1f-4876-972e-a98655f2084f generation from 156 to 157 during operation: update_inventory {{(pid=62684) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2223.864307] env[62684]: DEBUG nova.compute.provider_tree [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2223.939904] env[62684]: DEBUG nova.network.neutron [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Successfully updated port: 61adf5cc-1692-4079-b909-b15313ce9680 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2224.114472] env[62684]: DEBUG oslo_vmware.api 
[-] Task: {'id': task-2053599, 'name': CreateVM_Task, 'duration_secs': 0.44927} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2224.114654] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2224.115343] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2224.115515] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2224.116048] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2224.116135] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0049d08-7465-4573-bba0-1144526ad6f4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.124205] env[62684]: DEBUG oslo_vmware.api [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2224.124205] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e97111-959c-70d1-8516-946045f586b7" [ 2224.124205] env[62684]: _type = "Task" [ 2224.124205] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2224.127984] env[62684]: DEBUG oslo_vmware.api [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053600, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158868} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2224.128478] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83de388b-5cbc-417e-bfb6-e7c4207a5031 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "refresh_cache-264c6900-dbef-455e-95cc-1df73c735cc8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2224.128641] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83de388b-5cbc-417e-bfb6-e7c4207a5031 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired lock "refresh_cache-264c6900-dbef-455e-95cc-1df73c735cc8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2224.128810] env[62684]: DEBUG nova.network.neutron [None req-83de388b-5cbc-417e-bfb6-e7c4207a5031 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2224.129008] env[62684]: DEBUG nova.objects.instance [None req-83de388b-5cbc-417e-bfb6-e7c4207a5031 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lazy-loading 'info_cache' on Instance uuid 264c6900-dbef-455e-95cc-1df73c735cc8 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2224.133376] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2224.133575] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2224.133758] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2224.133943] env[62684]: INFO nova.compute.manager [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Took 1.19 seconds to destroy the instance on the hypervisor. [ 2224.134208] env[62684]: DEBUG oslo.service.loopingcall [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
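
Read together, the entries for instance 2baabe7a-ed33-4cef-9acc-a7b804610b0a trace the vmwareapi teardown order: power off the VM, unregister it, delete its datastore directory, mark the instance destroyed, then hand off to network deallocation with retries. A high-level, hedged sketch of that ordering (the object methods below are placeholders, not nova's or oslo.vmware's API):

def destroy_instance(vm, network):
    """Teardown order as it appears in the log for 2baabe7a-ed33-4cef-9acc-a7b804610b0a."""
    vm.power_off()        # PowerOffVM_Task
    vm.unregister()       # VirtualMachine.UnregisterVM
    vm.delete_files()     # FileManager.DeleteDatastoreFile_Task on [datastore1] <uuid>
    network.deallocate()  # deallocate_for_instance() against Neutron, retried in a loop

class _Stub:
    """Prints each call so the ordering above can be exercised stand-alone."""
    def __getattr__(self, name):
        return lambda *args, **kwargs: print(f"{name} called")

destroy_instance(_Stub(), _Stub())
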
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2224.134669] env[62684]: DEBUG nova.compute.manager [-] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2224.134772] env[62684]: DEBUG nova.network.neutron [-] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2224.142047] env[62684]: DEBUG oslo_vmware.api [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e97111-959c-70d1-8516-946045f586b7, 'name': SearchDatastore_Task, 'duration_secs': 0.011425} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2224.142379] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2224.142614] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2224.142853] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2224.143012] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2224.143220] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2224.144130] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c67881bd-4bdb-46d8-a444-8fec4e50300c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.152995] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Created directory with path 
[datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2224.153221] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2224.154278] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05804960-6e08-4e54-aab2-519bc1ebf092 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.159552] env[62684]: DEBUG oslo_vmware.api [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2224.159552] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c63569-7df2-456b-a294-ad0df6d3c92f" [ 2224.159552] env[62684]: _type = "Task" [ 2224.159552] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2224.167583] env[62684]: DEBUG oslo_vmware.api [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c63569-7df2-456b-a294-ad0df6d3c92f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2224.370132] env[62684]: DEBUG oslo_concurrency.lockutils [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.816s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2224.374277] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.282s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2224.374277] env[62684]: DEBUG nova.objects.instance [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Lazy-loading 'resources' on Instance uuid 3ff55331-6d5c-4558-b932-e266670f2ac9 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2224.395782] env[62684]: INFO nova.scheduler.client.report [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Deleted allocations for instance 58e67d8e-900e-4d22-a4fd-fe493758d4f2 [ 2224.442962] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquiring lock "refresh_cache-9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" {{(pid=62684) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2224.443171] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquired lock "refresh_cache-9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2224.443315] env[62684]: DEBUG nova.network.neutron [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2224.676891] env[62684]: DEBUG oslo_vmware.api [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c63569-7df2-456b-a294-ad0df6d3c92f, 'name': SearchDatastore_Task, 'duration_secs': 0.011397} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2224.677742] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ef985d7-2085-4ae5-a9ba-456423b699c8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.683497] env[62684]: DEBUG oslo_vmware.api [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2224.683497] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5206d513-6f2b-219a-f842-8f12c0707369" [ 2224.683497] env[62684]: _type = "Task" [ 2224.683497] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2224.692974] env[62684]: DEBUG oslo_vmware.api [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5206d513-6f2b-219a-f842-8f12c0707369, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2224.794862] env[62684]: DEBUG nova.network.neutron [req-6575b301-980f-413e-a852-a930faa040cf req-ad8a632e-d11b-4d1f-acf0-c468310a6b8c service nova] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Updated VIF entry in instance network info cache for port 91d5100c-0d94-42a3-a4f2-5055bd108b50. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2224.795336] env[62684]: DEBUG nova.network.neutron [req-6575b301-980f-413e-a852-a930faa040cf req-ad8a632e-d11b-4d1f-acf0-c468310a6b8c service nova] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Updating instance_info_cache with network_info: [{"id": "91d5100c-0d94-42a3-a4f2-5055bd108b50", "address": "fa:16:3e:58:06:3e", "network": {"id": "e177c6d0-ddd5-4029-94af-c8f1b937dd9f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1344612161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27d04006afc747e19ad87238bfdbaad1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91d5100c-0d", "ovs_interfaceid": "91d5100c-0d94-42a3-a4f2-5055bd108b50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2224.911934] env[62684]: DEBUG oslo_concurrency.lockutils [None req-399d6a46-ca95-4fd4-93ec-848f294ae148 tempest-InstanceActionsNegativeTestJSON-1020381940 tempest-InstanceActionsNegativeTestJSON-1020381940-project-member] Lock "58e67d8e-900e-4d22-a4fd-fe493758d4f2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.637s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2224.982242] env[62684]: DEBUG nova.network.neutron [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2225.122719] env[62684]: DEBUG nova.network.neutron [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Updating instance_info_cache with network_info: [{"id": "61adf5cc-1692-4079-b909-b15313ce9680", "address": "fa:16:3e:17:9f:65", "network": {"id": "4142ba34-c2e0-4a22-a8dd-be06ba98c6e5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1627792019-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0e0f0e1dc834134913bd742fa99b52f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61adf5cc-16", "ovs_interfaceid": "61adf5cc-1692-4079-b909-b15313ce9680", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2225.143079] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbe7203f-7e4f-428b-add3-af80c87bddb9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.152133] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b1d0e42-5911-4894-a80a-c8c11be40ff6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.187266] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d845c3-9e74-4e82-9466-18b037df695d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.200644] env[62684]: DEBUG oslo_vmware.api [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5206d513-6f2b-219a-f842-8f12c0707369, 'name': SearchDatastore_Task, 'duration_secs': 0.011112} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2225.201385] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2225.202011] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 587edf89-2ea0-4b89-8830-fa766b798398/587edf89-2ea0-4b89-8830-fa766b798398.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2225.202952] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc276509-ea18-4eab-8192-805c95e58e7b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.207158] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ce7da510-7a30-4f3d-9389-fdf0c5613b93 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.219213] env[62684]: DEBUG nova.compute.provider_tree [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2225.222059] env[62684]: DEBUG oslo_vmware.api [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2225.222059] env[62684]: value = "task-2053602" [ 2225.222059] env[62684]: _type = "Task" [ 2225.222059] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2225.230049] env[62684]: DEBUG oslo_vmware.api [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053602, 'name': CopyVirtualDisk_Task} progress is 0%. 
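
For the spawn of 587edf89-2ea0-4b89-8830-fa766b798398, the driver first probes the per-datastore image cache with SearchDatastore_Task and, on a hit, issues CopyVirtualDisk_Task from the cached VMDK to a per-instance folder, as logged above. A small sketch of how those two datastore paths are composed from the image id and instance UUID (string handling only; the helper names are ours, not nova's ds_util):

def cached_image_path(datastore, image_id, cache_dir="devstack-image-cache_base"):
    """e.g. [datastore2] devstack-image-cache_base/<image>/<image>.vmdk"""
    return f"[{datastore}] {cache_dir}/{image_id}/{image_id}.vmdk"

def instance_disk_path(datastore, instance_uuid):
    """e.g. [datastore2] <instance-uuid>/<instance-uuid>.vmdk"""
    return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

src = cached_image_path("datastore2", "3931321c-cb4c-4b87-8d3a-50e05ea01db2")
dst = instance_disk_path("datastore2", "587edf89-2ea0-4b89-8830-fa766b798398")
print(f"copy {src} -> {dst}")  # mirrors the CopyVirtualDisk_Task source/destination above
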
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2225.303683] env[62684]: DEBUG oslo_concurrency.lockutils [req-6575b301-980f-413e-a852-a930faa040cf req-ad8a632e-d11b-4d1f-acf0-c468310a6b8c service nova] Releasing lock "refresh_cache-587edf89-2ea0-4b89-8830-fa766b798398" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2225.454728] env[62684]: DEBUG nova.network.neutron [-] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2225.501048] env[62684]: DEBUG nova.network.neutron [None req-83de388b-5cbc-417e-bfb6-e7c4207a5031 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Updating instance_info_cache with network_info: [{"id": "34ae3a7e-83a9-4ebd-8582-bb73f3050948", "address": "fa:16:3e:fc:ef:f4", "network": {"id": "7678b347-6a54-4b84-9a4d-b566bbeb1ea4", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-51664912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d88ac878d44480b3b54b24ab87efa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34ae3a7e-83", "ovs_interfaceid": "34ae3a7e-83a9-4ebd-8582-bb73f3050948", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2225.625078] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Releasing lock "refresh_cache-9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2225.625475] env[62684]: DEBUG nova.compute.manager [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Instance network_info: |[{"id": "61adf5cc-1692-4079-b909-b15313ce9680", "address": "fa:16:3e:17:9f:65", "network": {"id": "4142ba34-c2e0-4a22-a8dd-be06ba98c6e5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1627792019-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0e0f0e1dc834134913bd742fa99b52f", 
"mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61adf5cc-16", "ovs_interfaceid": "61adf5cc-1692-4079-b909-b15313ce9680", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2225.625910] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:9f:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f2e45023-22b5-458b-826e-9b7eb69ba028', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '61adf5cc-1692-4079-b909-b15313ce9680', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2225.634063] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Creating folder: Project (f0e0f0e1dc834134913bd742fa99b52f). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2225.634443] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8c41f1ab-fae4-4a26-92eb-68333cf7d374 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.646922] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Created folder: Project (f0e0f0e1dc834134913bd742fa99b52f) in parent group-v421118. [ 2225.647170] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Creating folder: Instances. Parent ref: group-v421392. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2225.647825] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a7dfc6ef-764a-4205-8b6a-a9a0485104fb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.660093] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Created folder: Instances in parent group-v421392. [ 2225.660377] env[62684]: DEBUG oslo.service.loopingcall [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2225.660588] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2225.660813] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-acfcd60b-882e-4035-b7e9-18013f3ec42a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.681594] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2225.681594] env[62684]: value = "task-2053605" [ 2225.681594] env[62684]: _type = "Task" [ 2225.681594] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2225.692096] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053605, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2225.723820] env[62684]: DEBUG nova.scheduler.client.report [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2225.737069] env[62684]: DEBUG oslo_vmware.api [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053602, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2225.785795] env[62684]: DEBUG nova.compute.manager [req-d02a5654-d716-4dbb-ba83-f39cba5b5ba4 req-833442f5-c366-485f-ae47-a56c59239089 service nova] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Received event network-vif-plugged-61adf5cc-1692-4079-b909-b15313ce9680 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2225.786172] env[62684]: DEBUG oslo_concurrency.lockutils [req-d02a5654-d716-4dbb-ba83-f39cba5b5ba4 req-833442f5-c366-485f-ae47-a56c59239089 service nova] Acquiring lock "9f1e9ae9-c082-4fbe-bd21-6e14e40962c1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2225.786249] env[62684]: DEBUG oslo_concurrency.lockutils [req-d02a5654-d716-4dbb-ba83-f39cba5b5ba4 req-833442f5-c366-485f-ae47-a56c59239089 service nova] Lock "9f1e9ae9-c082-4fbe-bd21-6e14e40962c1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2225.786537] env[62684]: DEBUG oslo_concurrency.lockutils [req-d02a5654-d716-4dbb-ba83-f39cba5b5ba4 req-833442f5-c366-485f-ae47-a56c59239089 service nova] Lock "9f1e9ae9-c082-4fbe-bd21-6e14e40962c1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2225.786723] env[62684]: DEBUG nova.compute.manager [req-d02a5654-d716-4dbb-ba83-f39cba5b5ba4 req-833442f5-c366-485f-ae47-a56c59239089 service nova] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] No waiting events found dispatching network-vif-plugged-61adf5cc-1692-4079-b909-b15313ce9680 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2225.786892] env[62684]: WARNING nova.compute.manager [req-d02a5654-d716-4dbb-ba83-f39cba5b5ba4 req-833442f5-c366-485f-ae47-a56c59239089 service nova] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Received unexpected event network-vif-plugged-61adf5cc-1692-4079-b909-b15313ce9680 for instance with vm_state building and task_state spawning. [ 2225.787071] env[62684]: DEBUG nova.compute.manager [req-d02a5654-d716-4dbb-ba83-f39cba5b5ba4 req-833442f5-c366-485f-ae47-a56c59239089 service nova] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Received event network-changed-61adf5cc-1692-4079-b909-b15313ce9680 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2225.787437] env[62684]: DEBUG nova.compute.manager [req-d02a5654-d716-4dbb-ba83-f39cba5b5ba4 req-833442f5-c366-485f-ae47-a56c59239089 service nova] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Refreshing instance network info cache due to event network-changed-61adf5cc-1692-4079-b909-b15313ce9680. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2225.787701] env[62684]: DEBUG oslo_concurrency.lockutils [req-d02a5654-d716-4dbb-ba83-f39cba5b5ba4 req-833442f5-c366-485f-ae47-a56c59239089 service nova] Acquiring lock "refresh_cache-9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2225.787846] env[62684]: DEBUG oslo_concurrency.lockutils [req-d02a5654-d716-4dbb-ba83-f39cba5b5ba4 req-833442f5-c366-485f-ae47-a56c59239089 service nova] Acquired lock "refresh_cache-9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2225.788015] env[62684]: DEBUG nova.network.neutron [req-d02a5654-d716-4dbb-ba83-f39cba5b5ba4 req-833442f5-c366-485f-ae47-a56c59239089 service nova] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Refreshing network info cache for port 61adf5cc-1692-4079-b909-b15313ce9680 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2225.958379] env[62684]: INFO nova.compute.manager [-] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Took 1.82 seconds to deallocate network for instance. [ 2226.003783] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83de388b-5cbc-417e-bfb6-e7c4207a5031 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Releasing lock "refresh_cache-264c6900-dbef-455e-95cc-1df73c735cc8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2226.004123] env[62684]: DEBUG nova.objects.instance [None req-83de388b-5cbc-417e-bfb6-e7c4207a5031 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lazy-loading 'migration_context' on Instance uuid 264c6900-dbef-455e-95cc-1df73c735cc8 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2226.194055] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053605, 'name': CreateVM_Task, 'duration_secs': 0.416482} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2226.194055] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2226.194360] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2226.194395] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2226.194967] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2226.195139] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86d7d90a-5183-49e1-9d80-6e9f26338ebb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.200354] env[62684]: DEBUG oslo_vmware.api [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2226.200354] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521259cc-1f01-3934-fef8-cb89f06a7e0e" [ 2226.200354] env[62684]: _type = "Task" [ 2226.200354] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2226.209580] env[62684]: DEBUG oslo_vmware.api [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521259cc-1f01-3934-fef8-cb89f06a7e0e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2226.231845] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.859s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2226.234216] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.940s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2226.234411] env[62684]: DEBUG nova.objects.instance [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lazy-loading 'resources' on Instance uuid d7f09d0e-f7b6-415e-8d82-47eba1153aa1 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2226.240797] env[62684]: DEBUG oslo_vmware.api [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053602, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.593602} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2226.241083] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 587edf89-2ea0-4b89-8830-fa766b798398/587edf89-2ea0-4b89-8830-fa766b798398.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2226.241728] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2226.241728] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d580a3bc-36fb-4669-94db-0114727e4931 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.249769] env[62684]: DEBUG oslo_vmware.api [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2226.249769] env[62684]: value = "task-2053606" [ 2226.249769] env[62684]: _type = "Task" [ 2226.249769] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2226.259909] env[62684]: DEBUG oslo_vmware.api [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053606, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2226.263960] env[62684]: INFO nova.scheduler.client.report [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Deleted allocations for instance 3ff55331-6d5c-4558-b932-e266670f2ac9 [ 2226.467822] env[62684]: DEBUG oslo_concurrency.lockutils [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2226.468971] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-92e5635c-14ec-4f07-aaff-34ef8ef17fb4 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Volume attach. Driver type: vmdk {{(pid=62684) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2226.469216] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-92e5635c-14ec-4f07-aaff-34ef8ef17fb4 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421390', 'volume_id': 'cba4160c-2fba-4b43-b9ec-1bef4fb246a1', 'name': 'volume-cba4160c-2fba-4b43-b9ec-1bef4fb246a1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '584845d2-d146-42bf-8ef5-58532fe24f65', 'attached_at': '', 'detached_at': '', 'volume_id': 'cba4160c-2fba-4b43-b9ec-1bef4fb246a1', 'serial': 'cba4160c-2fba-4b43-b9ec-1bef4fb246a1'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2226.470109] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91aedd85-479c-4970-9713-381e23d74c6e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.487177] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb01fbe1-feed-4398-b55f-99a16be8d2cc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.510656] env[62684]: DEBUG nova.objects.base [None req-83de388b-5cbc-417e-bfb6-e7c4207a5031 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Object Instance<264c6900-dbef-455e-95cc-1df73c735cc8> lazy-loaded attributes: info_cache,migration_context {{(pid=62684) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2226.519459] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-92e5635c-14ec-4f07-aaff-34ef8ef17fb4 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 
584845d2-d146-42bf-8ef5-58532fe24f65] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] volume-cba4160c-2fba-4b43-b9ec-1bef4fb246a1/volume-cba4160c-2fba-4b43-b9ec-1bef4fb246a1.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2226.523915] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2e3f6cd-e7b9-484c-8259-d791ac9196ac {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.528020] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5b9b879-b461-488f-b7f7-30ae6a5e93c0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.557682] env[62684]: DEBUG oslo_vmware.rw_handles [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528613b4-50b6-4826-5f8d-1b167659d156/disk-0.vmdk. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2226.558877] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3482cfd5-a651-4a74-9ca9-8b9fc86fa08d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.561823] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ecc237-c59b-4075-94a3-7b91704cdad4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.564282] env[62684]: DEBUG oslo_vmware.api [None req-92e5635c-14ec-4f07-aaff-34ef8ef17fb4 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 2226.564282] env[62684]: value = "task-2053607" [ 2226.564282] env[62684]: _type = "Task" [ 2226.564282] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2226.570433] env[62684]: DEBUG oslo_vmware.rw_handles [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528613b4-50b6-4826-5f8d-1b167659d156/disk-0.vmdk is in state: ready. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2226.570598] env[62684]: ERROR oslo_vmware.rw_handles [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528613b4-50b6-4826-5f8d-1b167659d156/disk-0.vmdk due to incomplete transfer. [ 2226.570880] env[62684]: DEBUG oslo_vmware.api [None req-83de388b-5cbc-417e-bfb6-e7c4207a5031 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2226.570880] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52da71b9-fb24-34d4-035e-fb30f5602bc2" [ 2226.570880] env[62684]: _type = "Task" [ 2226.570880] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2226.571429] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-4f04d125-213a-4ef2-aad7-a26486a98c81 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.578431] env[62684]: DEBUG oslo_vmware.api [None req-92e5635c-14ec-4f07-aaff-34ef8ef17fb4 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053607, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2226.586025] env[62684]: DEBUG oslo_vmware.api [None req-83de388b-5cbc-417e-bfb6-e7c4207a5031 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52da71b9-fb24-34d4-035e-fb30f5602bc2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2226.586025] env[62684]: DEBUG oslo_vmware.rw_handles [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528613b4-50b6-4826-5f8d-1b167659d156/disk-0.vmdk. {{(pid=62684) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2226.586025] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Uploaded image 4cdf859f-d519-41eb-8ee6-7f85e8faee25 to the Glance image server {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2226.587861] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Destroying the VM {{(pid=62684) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2226.588464] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c7f1d647-8682-43b6-ae64-13e06dab7d45 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.594399] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2226.594399] env[62684]: value = "task-2053608" [ 2226.594399] env[62684]: _type = "Task" [ 2226.594399] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2226.604784] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053608, 'name': Destroy_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2226.636828] env[62684]: DEBUG nova.network.neutron [req-d02a5654-d716-4dbb-ba83-f39cba5b5ba4 req-833442f5-c366-485f-ae47-a56c59239089 service nova] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Updated VIF entry in instance network info cache for port 61adf5cc-1692-4079-b909-b15313ce9680. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2226.637810] env[62684]: DEBUG nova.network.neutron [req-d02a5654-d716-4dbb-ba83-f39cba5b5ba4 req-833442f5-c366-485f-ae47-a56c59239089 service nova] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Updating instance_info_cache with network_info: [{"id": "61adf5cc-1692-4079-b909-b15313ce9680", "address": "fa:16:3e:17:9f:65", "network": {"id": "4142ba34-c2e0-4a22-a8dd-be06ba98c6e5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1627792019-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0e0f0e1dc834134913bd742fa99b52f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61adf5cc-16", "ovs_interfaceid": "61adf5cc-1692-4079-b909-b15313ce9680", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2226.713142] env[62684]: DEBUG oslo_vmware.api [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521259cc-1f01-3934-fef8-cb89f06a7e0e, 'name': SearchDatastore_Task, 'duration_secs': 0.011169} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2226.714064] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2226.714546] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2226.715872] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2226.716162] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2226.716464] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2226.718981] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-34d60df6-f4f1-44b7-b0ea-6ae44886b432 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.728019] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2226.728019] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2226.728019] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4fd597d-e3e9-4970-903a-2ddc2ce3d0a9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.734355] env[62684]: DEBUG oslo_vmware.api [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2226.734355] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]525e3db5-80f7-bfaf-e4cd-25711ef7cee7" [ 2226.734355] env[62684]: _type = "Task" [ 2226.734355] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2226.743785] env[62684]: DEBUG oslo_vmware.api [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]525e3db5-80f7-bfaf-e4cd-25711ef7cee7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2226.761877] env[62684]: DEBUG oslo_vmware.api [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053606, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.13638} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2226.761877] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2226.761877] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23cf974c-177b-4069-aa62-2ed7470c3a80 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.788019] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 587edf89-2ea0-4b89-8830-fa766b798398/587edf89-2ea0-4b89-8830-fa766b798398.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2226.788019] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ab30a1be-e837-418f-b120-3a8aa59e01f8 tempest-ServerTagsTestJSON-296801673 tempest-ServerTagsTestJSON-296801673-project-member] Lock "3ff55331-6d5c-4558-b932-e266670f2ac9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.991s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2226.791495] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3cd3dc4a-0b01-431a-bfb2-6fda05ddba63 {{(pid=62684) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.814089] env[62684]: DEBUG oslo_vmware.api [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2226.814089] env[62684]: value = "task-2053609" [ 2226.814089] env[62684]: _type = "Task" [ 2226.814089] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2226.826961] env[62684]: DEBUG oslo_vmware.api [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053609, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2226.993480] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2693e4d-87a7-4bbe-bdba-9f643a76cf6e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.001152] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-653c27bf-1890-4177-94e7-c0c5e9f196ae {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.031898] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e120ba4-9a78-4573-9d47-068e4f450e8d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.040132] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1d59cc7-454b-4923-8c2c-c9681acfa315 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.052951] env[62684]: DEBUG nova.compute.provider_tree [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2227.074060] env[62684]: DEBUG oslo_vmware.api [None req-92e5635c-14ec-4f07-aaff-34ef8ef17fb4 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053607, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2227.082065] env[62684]: DEBUG oslo_vmware.api [None req-83de388b-5cbc-417e-bfb6-e7c4207a5031 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52da71b9-fb24-34d4-035e-fb30f5602bc2, 'name': SearchDatastore_Task, 'duration_secs': 0.014405} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2227.082468] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83de388b-5cbc-417e-bfb6-e7c4207a5031 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2227.109072] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053608, 'name': Destroy_Task, 'duration_secs': 0.393959} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2227.109406] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Destroyed the VM [ 2227.109613] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Deleting Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2227.109872] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-700cde59-830d-44d4-bc8f-f215bb58dded {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.115543] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2227.115543] env[62684]: value = "task-2053610" [ 2227.115543] env[62684]: _type = "Task" [ 2227.115543] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2227.124888] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053610, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2227.142099] env[62684]: DEBUG oslo_concurrency.lockutils [req-d02a5654-d716-4dbb-ba83-f39cba5b5ba4 req-833442f5-c366-485f-ae47-a56c59239089 service nova] Releasing lock "refresh_cache-9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2227.142377] env[62684]: DEBUG nova.compute.manager [req-d02a5654-d716-4dbb-ba83-f39cba5b5ba4 req-833442f5-c366-485f-ae47-a56c59239089 service nova] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Received event network-vif-deleted-3fcb3920-5e10-45e2-865d-cc9b89a1e335 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2227.246099] env[62684]: DEBUG oslo_vmware.api [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]525e3db5-80f7-bfaf-e4cd-25711ef7cee7, 'name': SearchDatastore_Task, 'duration_secs': 0.008321} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2227.246645] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a78308f6-54b5-4afc-aaac-fa88d463a4a7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.252358] env[62684]: DEBUG oslo_vmware.api [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2227.252358] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e11701-2546-ef00-734a-bd09bd2f05f6" [ 2227.252358] env[62684]: _type = "Task" [ 2227.252358] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2227.261746] env[62684]: DEBUG oslo_vmware.api [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e11701-2546-ef00-734a-bd09bd2f05f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2227.324597] env[62684]: DEBUG oslo_vmware.api [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053609, 'name': ReconfigVM_Task, 'duration_secs': 0.311723} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2227.324934] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 587edf89-2ea0-4b89-8830-fa766b798398/587edf89-2ea0-4b89-8830-fa766b798398.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2227.325616] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-80073e88-c807-4128-9179-853eb6315fd5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.332290] env[62684]: DEBUG oslo_vmware.api [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2227.332290] env[62684]: value = "task-2053611" [ 2227.332290] env[62684]: _type = "Task" [ 2227.332290] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2227.340631] env[62684]: DEBUG oslo_vmware.api [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053611, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2227.557572] env[62684]: DEBUG nova.scheduler.client.report [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2227.574124] env[62684]: DEBUG oslo_vmware.api [None req-92e5635c-14ec-4f07-aaff-34ef8ef17fb4 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053607, 'name': ReconfigVM_Task, 'duration_secs': 0.93366} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2227.574449] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-92e5635c-14ec-4f07-aaff-34ef8ef17fb4 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Reconfigured VM instance instance-0000005e to attach disk [datastore2] volume-cba4160c-2fba-4b43-b9ec-1bef4fb246a1/volume-cba4160c-2fba-4b43-b9ec-1bef4fb246a1.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2227.580066] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd2c3ee9-9572-4676-a83b-cf3a47374e49 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.596295] env[62684]: DEBUG oslo_vmware.api [None req-92e5635c-14ec-4f07-aaff-34ef8ef17fb4 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 2227.596295] env[62684]: value = "task-2053612" [ 2227.596295] env[62684]: _type = "Task" [ 2227.596295] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2227.608276] env[62684]: DEBUG oslo_vmware.api [None req-92e5635c-14ec-4f07-aaff-34ef8ef17fb4 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053612, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2227.625489] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053610, 'name': RemoveSnapshot_Task, 'duration_secs': 0.357948} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2227.625764] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Deleted Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2227.626091] env[62684]: DEBUG nova.compute.manager [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2227.627726] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e096e4af-00c1-44d7-b859-cd7a3424fcbc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.762644] env[62684]: DEBUG oslo_vmware.api [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e11701-2546-ef00-734a-bd09bd2f05f6, 'name': SearchDatastore_Task, 'duration_secs': 0.009681} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2227.762924] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2227.763221] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1/9f1e9ae9-c082-4fbe-bd21-6e14e40962c1.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2227.763599] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-32bc7a3d-30ae-4a6b-bfa1-cff855e8162b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.769846] env[62684]: DEBUG oslo_vmware.api [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2227.769846] env[62684]: value = "task-2053613" [ 2227.769846] env[62684]: _type = "Task" [ 2227.769846] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2227.777545] env[62684]: DEBUG oslo_vmware.api [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053613, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2227.843123] env[62684]: DEBUG oslo_vmware.api [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053611, 'name': Rename_Task, 'duration_secs': 0.15045} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2227.843490] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2227.843777] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-73fe6999-3a0e-4f19-b274-e203b2bb62d0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.850726] env[62684]: DEBUG oslo_vmware.api [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2227.850726] env[62684]: value = "task-2053614" [ 2227.850726] env[62684]: _type = "Task" [ 2227.850726] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2227.859345] env[62684]: DEBUG oslo_vmware.api [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053614, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2228.063690] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.829s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2228.066291] env[62684]: DEBUG oslo_concurrency.lockutils [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.600s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2228.066521] env[62684]: DEBUG nova.objects.instance [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lazy-loading 'resources' on Instance uuid 2baabe7a-ed33-4cef-9acc-a7b804610b0a {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2228.092564] env[62684]: INFO nova.scheduler.client.report [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Deleted allocations for instance d7f09d0e-f7b6-415e-8d82-47eba1153aa1 [ 2228.108713] env[62684]: DEBUG oslo_vmware.api [None req-92e5635c-14ec-4f07-aaff-34ef8ef17fb4 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053612, 'name': ReconfigVM_Task, 'duration_secs': 0.171249} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2228.109073] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-92e5635c-14ec-4f07-aaff-34ef8ef17fb4 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421390', 'volume_id': 'cba4160c-2fba-4b43-b9ec-1bef4fb246a1', 'name': 'volume-cba4160c-2fba-4b43-b9ec-1bef4fb246a1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '584845d2-d146-42bf-8ef5-58532fe24f65', 'attached_at': '', 'detached_at': '', 'volume_id': 'cba4160c-2fba-4b43-b9ec-1bef4fb246a1', 'serial': 'cba4160c-2fba-4b43-b9ec-1bef4fb246a1'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2228.141716] env[62684]: INFO nova.compute.manager [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Shelve offloading [ 2228.143913] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2228.144205] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a7f7c2c4-9a31-4e1f-b157-ea267697cba5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.153430] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2228.153430] env[62684]: value = "task-2053615" [ 2228.153430] env[62684]: _type = "Task" [ 2228.153430] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2228.167103] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] VM already powered off {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2228.167451] env[62684]: DEBUG nova.compute.manager [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2228.168503] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c75f3792-8195-4d48-b8de-96180d79dd67 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.176212] env[62684]: DEBUG oslo_concurrency.lockutils [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "refresh_cache-02480039-f749-402a-92db-df664304a5bf" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2228.176367] env[62684]: DEBUG oslo_concurrency.lockutils [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquired lock "refresh_cache-02480039-f749-402a-92db-df664304a5bf" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2228.176903] env[62684]: DEBUG nova.network.neutron [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2228.280067] env[62684]: DEBUG oslo_vmware.api [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053613, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469675} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2228.280416] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1/9f1e9ae9-c082-4fbe-bd21-6e14e40962c1.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2228.280640] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2228.280903] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-89a75fe7-b9cb-4fcd-8567-fa1c338b34dd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.288433] env[62684]: DEBUG oslo_vmware.api [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2228.288433] env[62684]: value = "task-2053616" [ 2228.288433] env[62684]: _type = "Task" [ 2228.288433] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2228.296313] env[62684]: DEBUG oslo_vmware.api [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053616, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2228.361890] env[62684]: DEBUG oslo_vmware.api [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053614, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2228.607815] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9716f47d-c5e1-4b9d-bd74-ee1b05b96083 tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "d7f09d0e-f7b6-415e-8d82-47eba1153aa1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.919s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2228.764660] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c22ae50a-8201-4db7-997d-357e8fdc4e7c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.772647] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d3b34a-ca19-4643-8d0b-c94bf2f506cf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.812362] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d34df2-ed28-4c2d-8f5f-31fca05008ac {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.820589] env[62684]: DEBUG oslo_vmware.api [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053616, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060298} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2228.822868] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2228.823842] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c8aa8f5-3bcb-46af-a88e-9625acb58ce8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.827219] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c38ea73e-ff97-4a6a-b638-bf370cfecec6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.850892] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1/9f1e9ae9-c082-4fbe-bd21-6e14e40962c1.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2228.861904] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e3eee46-244b-46bb-8ce4-b7b4571ad7e6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.876773] env[62684]: DEBUG 
nova.compute.provider_tree [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2228.886992] env[62684]: DEBUG oslo_vmware.api [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053614, 'name': PowerOnVM_Task, 'duration_secs': 0.604174} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2228.889063] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2228.889325] env[62684]: INFO nova.compute.manager [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Took 7.70 seconds to spawn the instance on the hypervisor. [ 2228.889498] env[62684]: DEBUG nova.compute.manager [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2228.890235] env[62684]: DEBUG oslo_vmware.api [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2228.890235] env[62684]: value = "task-2053617" [ 2228.890235] env[62684]: _type = "Task" [ 2228.890235] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2228.890628] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7379f6e4-6b1a-4425-b7e5-8ac83c303674 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.902113] env[62684]: DEBUG oslo_vmware.api [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053617, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2228.957997] env[62684]: DEBUG nova.network.neutron [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Updating instance_info_cache with network_info: [{"id": "3f61101e-05cd-4c60-ad9a-8a272d5e8879", "address": "fa:16:3e:f8:fd:88", "network": {"id": "b24dd0c0-a394-4ca6-a79a-94535bc1df6f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2023102141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "607a0aa1049640d882d7dd490f5f98ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f61101e-05", "ovs_interfaceid": "3f61101e-05cd-4c60-ad9a-8a272d5e8879", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2229.149010] env[62684]: DEBUG nova.objects.instance [None req-92e5635c-14ec-4f07-aaff-34ef8ef17fb4 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lazy-loading 'flavor' on Instance uuid 584845d2-d146-42bf-8ef5-58532fe24f65 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2229.383270] env[62684]: DEBUG nova.scheduler.client.report [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2229.418202] env[62684]: DEBUG oslo_vmware.api [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053617, 'name': ReconfigVM_Task, 'duration_secs': 0.277725} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2229.421150] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Reconfigured VM instance instance-00000068 to attach disk [datastore2] 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1/9f1e9ae9-c082-4fbe-bd21-6e14e40962c1.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2229.422103] env[62684]: INFO nova.compute.manager [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Took 12.24 seconds to build instance. [ 2229.423127] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-99fd57af-a64e-4057-a0c1-b42511c6a42b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.436525] env[62684]: DEBUG oslo_vmware.api [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2229.436525] env[62684]: value = "task-2053618" [ 2229.436525] env[62684]: _type = "Task" [ 2229.436525] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2229.453966] env[62684]: DEBUG oslo_vmware.api [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053618, 'name': Rename_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2229.461209] env[62684]: DEBUG oslo_concurrency.lockutils [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Releasing lock "refresh_cache-02480039-f749-402a-92db-df664304a5bf" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2229.576100] env[62684]: DEBUG oslo_concurrency.lockutils [None req-54e4c030-e9fc-4721-8e7b-db621f7b1c52 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "584845d2-d146-42bf-8ef5-58532fe24f65" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2229.654861] env[62684]: DEBUG oslo_concurrency.lockutils [None req-92e5635c-14ec-4f07-aaff-34ef8ef17fb4 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "584845d2-d146-42bf-8ef5-58532fe24f65" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.816s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2229.655827] env[62684]: DEBUG oslo_concurrency.lockutils [None req-54e4c030-e9fc-4721-8e7b-db621f7b1c52 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "584845d2-d146-42bf-8ef5-58532fe24f65" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.080s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2229.730081] env[62684]: DEBUG nova.compute.manager [req-3b4068f1-3d2e-47ac-bc8c-7c34a3a185aa req-0f14d1de-a219-49c5-a2dc-41d83e603ad4 service nova] [instance: 02480039-f749-402a-92db-df664304a5bf] Received event network-vif-unplugged-3f61101e-05cd-4c60-ad9a-8a272d5e8879 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2229.730365] env[62684]: DEBUG oslo_concurrency.lockutils [req-3b4068f1-3d2e-47ac-bc8c-7c34a3a185aa req-0f14d1de-a219-49c5-a2dc-41d83e603ad4 service nova] Acquiring lock "02480039-f749-402a-92db-df664304a5bf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2229.730641] env[62684]: DEBUG oslo_concurrency.lockutils [req-3b4068f1-3d2e-47ac-bc8c-7c34a3a185aa req-0f14d1de-a219-49c5-a2dc-41d83e603ad4 service nova] Lock "02480039-f749-402a-92db-df664304a5bf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2229.730852] env[62684]: DEBUG oslo_concurrency.lockutils [req-3b4068f1-3d2e-47ac-bc8c-7c34a3a185aa req-0f14d1de-a219-49c5-a2dc-41d83e603ad4 service nova] Lock "02480039-f749-402a-92db-df664304a5bf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2229.731208] env[62684]: DEBUG nova.compute.manager [req-3b4068f1-3d2e-47ac-bc8c-7c34a3a185aa req-0f14d1de-a219-49c5-a2dc-41d83e603ad4 service nova] [instance: 
02480039-f749-402a-92db-df664304a5bf] No waiting events found dispatching network-vif-unplugged-3f61101e-05cd-4c60-ad9a-8a272d5e8879 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2229.731481] env[62684]: WARNING nova.compute.manager [req-3b4068f1-3d2e-47ac-bc8c-7c34a3a185aa req-0f14d1de-a219-49c5-a2dc-41d83e603ad4 service nova] [instance: 02480039-f749-402a-92db-df664304a5bf] Received unexpected event network-vif-unplugged-3f61101e-05cd-4c60-ad9a-8a272d5e8879 for instance with vm_state shelved and task_state shelving_offloading. [ 2229.758837] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2229.759984] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c55a4bd-98c7-40ac-99ff-74e64b609696 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.767580] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2229.767825] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fa8892a6-b31d-499e-bb4b-485db76eb9c1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.784457] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "f037d6b2-2082-4611-985e-b9a077eb8250" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2229.784712] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "f037d6b2-2082-4611-985e-b9a077eb8250" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2229.785302] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "f037d6b2-2082-4611-985e-b9a077eb8250-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2229.785518] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "f037d6b2-2082-4611-985e-b9a077eb8250-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s 
{{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2229.785703] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "f037d6b2-2082-4611-985e-b9a077eb8250-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2229.787943] env[62684]: INFO nova.compute.manager [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Terminating instance [ 2229.789743] env[62684]: DEBUG nova.compute.manager [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2229.789947] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2229.790786] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb9da3c-2831-4621-b22d-6f96f497fadb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.799014] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2229.799334] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c3f698ec-abac-442e-8d80-f610dd43f69a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.806622] env[62684]: DEBUG nova.compute.manager [req-e5a2d115-49b9-43fd-b441-06bc3e0af28a req-d74ecf19-e124-4507-a2a8-4ab0951cc81f service nova] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Received event network-changed-91d5100c-0d94-42a3-a4f2-5055bd108b50 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2229.806827] env[62684]: DEBUG nova.compute.manager [req-e5a2d115-49b9-43fd-b441-06bc3e0af28a req-d74ecf19-e124-4507-a2a8-4ab0951cc81f service nova] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Refreshing instance network info cache due to event network-changed-91d5100c-0d94-42a3-a4f2-5055bd108b50. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2229.807062] env[62684]: DEBUG oslo_concurrency.lockutils [req-e5a2d115-49b9-43fd-b441-06bc3e0af28a req-d74ecf19-e124-4507-a2a8-4ab0951cc81f service nova] Acquiring lock "refresh_cache-587edf89-2ea0-4b89-8830-fa766b798398" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2229.807216] env[62684]: DEBUG oslo_concurrency.lockutils [req-e5a2d115-49b9-43fd-b441-06bc3e0af28a req-d74ecf19-e124-4507-a2a8-4ab0951cc81f service nova] Acquired lock "refresh_cache-587edf89-2ea0-4b89-8830-fa766b798398" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2229.808060] env[62684]: DEBUG nova.network.neutron [req-e5a2d115-49b9-43fd-b441-06bc3e0af28a req-d74ecf19-e124-4507-a2a8-4ab0951cc81f service nova] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Refreshing network info cache for port 91d5100c-0d94-42a3-a4f2-5055bd108b50 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2229.813592] env[62684]: DEBUG oslo_vmware.api [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2229.813592] env[62684]: value = "task-2053620" [ 2229.813592] env[62684]: _type = "Task" [ 2229.813592] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2229.828626] env[62684]: DEBUG oslo_vmware.api [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053620, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2229.870659] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2229.870910] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2229.871192] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Deleting the datastore file [datastore2] 02480039-f749-402a-92db-df664304a5bf {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2229.871475] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5a431e2e-0a2d-42e3-a566-ecc7551b5e4a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.877389] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2229.877389] env[62684]: value = "task-2053621" [ 2229.877389] env[62684]: _type = "Task" [ 2229.877389] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2229.886076] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053621, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2229.888954] env[62684]: DEBUG oslo_concurrency.lockutils [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.823s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2229.891980] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83de388b-5cbc-417e-bfb6-e7c4207a5031 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 2.810s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2229.921665] env[62684]: INFO nova.scheduler.client.report [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Deleted allocations for instance 2baabe7a-ed33-4cef-9acc-a7b804610b0a [ 2229.929914] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c7ddec31-418e-40c5-aff5-fceeb4bbdfc7 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "587edf89-2ea0-4b89-8830-fa766b798398" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.762s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2229.947969] env[62684]: DEBUG oslo_vmware.api [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053618, 'name': Rename_Task, 'duration_secs': 0.132443} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2229.948496] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2229.948774] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ed3057ad-9d8d-4955-bd30-726fed3d1ef8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.955250] env[62684]: DEBUG oslo_vmware.api [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2229.955250] env[62684]: value = "task-2053622" [ 2229.955250] env[62684]: _type = "Task" [ 2229.955250] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2229.963032] env[62684]: DEBUG oslo_vmware.api [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053622, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2230.161732] env[62684]: INFO nova.compute.manager [None req-54e4c030-e9fc-4721-8e7b-db621f7b1c52 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Detaching volume 93dd9c99-f5f4-43f7-8461-d3e47a775078 [ 2230.194240] env[62684]: INFO nova.virt.block_device [None req-54e4c030-e9fc-4721-8e7b-db621f7b1c52 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Attempting to driver detach volume 93dd9c99-f5f4-43f7-8461-d3e47a775078 from mountpoint /dev/sdb [ 2230.194555] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-54e4c030-e9fc-4721-8e7b-db621f7b1c52 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Volume detach. Driver type: vmdk {{(pid=62684) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2230.194810] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-54e4c030-e9fc-4721-8e7b-db621f7b1c52 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421388', 'volume_id': '93dd9c99-f5f4-43f7-8461-d3e47a775078', 'name': 'volume-93dd9c99-f5f4-43f7-8461-d3e47a775078', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '584845d2-d146-42bf-8ef5-58532fe24f65', 'attached_at': '', 'detached_at': '', 'volume_id': '93dd9c99-f5f4-43f7-8461-d3e47a775078', 'serial': '93dd9c99-f5f4-43f7-8461-d3e47a775078'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2230.195767] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a62d6413-76ac-4473-9e53-edfd4442b205 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.223945] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "daf1486b-d5c2-4341-8a27-36eeeb08cd26" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2230.224254] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "daf1486b-d5c2-4341-8a27-36eeeb08cd26" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2230.224485] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "daf1486b-d5c2-4341-8a27-36eeeb08cd26-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" 
{{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2230.224672] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "daf1486b-d5c2-4341-8a27-36eeeb08cd26-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2230.224851] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "daf1486b-d5c2-4341-8a27-36eeeb08cd26-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2230.227082] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3805805b-a60a-4093-a758-58e3d730a47f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.230063] env[62684]: INFO nova.compute.manager [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Terminating instance [ 2230.232193] env[62684]: DEBUG nova.compute.manager [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2230.232398] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2230.233150] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c59f3867-2fbc-472a-9ff0-7068fd9659ab {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.238690] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f89abf-8ac2-4fcf-9bd0-48611cf83aad {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.242850] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2230.243344] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aa15a985-9751-4542-9593-a4899f170c4e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.268553] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1328855f-7b6f-46c4-972d-57eaf717be19 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.270988] env[62684]: DEBUG oslo_vmware.api [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2230.270988] env[62684]: value = "task-2053623" [ 2230.270988] env[62684]: _type = "Task" [ 2230.270988] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2230.284598] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-54e4c030-e9fc-4721-8e7b-db621f7b1c52 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] The volume has not been displaced from its original location: [datastore1] volume-93dd9c99-f5f4-43f7-8461-d3e47a775078/volume-93dd9c99-f5f4-43f7-8461-d3e47a775078.vmdk. No consolidation needed. 
{{(pid=62684) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2230.289823] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-54e4c030-e9fc-4721-8e7b-db621f7b1c52 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Reconfiguring VM instance instance-0000005e to detach disk 2001 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2230.290588] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5105fe5c-1d96-4207-844c-d37bf58bcf76 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.307734] env[62684]: DEBUG oslo_vmware.api [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053623, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2230.313209] env[62684]: DEBUG oslo_vmware.api [None req-54e4c030-e9fc-4721-8e7b-db621f7b1c52 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 2230.313209] env[62684]: value = "task-2053624" [ 2230.313209] env[62684]: _type = "Task" [ 2230.313209] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2230.326562] env[62684]: DEBUG oslo_vmware.api [None req-54e4c030-e9fc-4721-8e7b-db621f7b1c52 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053624, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2230.329620] env[62684]: DEBUG oslo_vmware.api [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053620, 'name': PowerOffVM_Task, 'duration_secs': 0.251292} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2230.330354] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2230.330354] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2230.330354] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3bcb4da3-8819-4e3c-9014-b49fe392cda8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.388485] env[62684]: DEBUG oslo_vmware.api [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053621, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147303} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2230.388823] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2230.389037] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2230.389246] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2230.419770] env[62684]: INFO nova.scheduler.client.report [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Deleted allocations for instance 02480039-f749-402a-92db-df664304a5bf [ 2230.431227] env[62684]: DEBUG oslo_concurrency.lockutils [None req-65a98383-e857-4032-bf62-caf9e538b299 tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "2baabe7a-ed33-4cef-9acc-a7b804610b0a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.496s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2230.439372] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] 
Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2230.439620] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2230.439804] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Deleting the datastore file [datastore1] f037d6b2-2082-4611-985e-b9a077eb8250 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2230.440101] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aaece306-fb62-4d9f-85ac-4835ea54efbb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.447413] env[62684]: DEBUG oslo_vmware.api [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for the task: (returnval){ [ 2230.447413] env[62684]: value = "task-2053626" [ 2230.447413] env[62684]: _type = "Task" [ 2230.447413] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2230.471042] env[62684]: DEBUG oslo_vmware.api [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053626, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2230.483472] env[62684]: DEBUG oslo_vmware.api [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053622, 'name': PowerOnVM_Task, 'duration_secs': 0.450044} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2230.483772] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2230.483981] env[62684]: INFO nova.compute.manager [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Took 6.90 seconds to spawn the instance on the hypervisor. 
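Note: the completed-task records above (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, PowerOnVM_Task, DeleteDatastoreFile_Task) all follow the same oslo.vmware wait_for_task pattern: submit a vSphere task, poll it while logging "progress is N%.", then log the measured duration_secs once it reports success. The following is a minimal, self-contained sketch of that poll loop only; TaskInfo and fetch_task_info are hypothetical stand-ins for the vSphere TaskInfo property that oslo.vmware reads, not the oslo.vmware API itself.

# Sketch of the task-polling pattern traced by the DEBUG lines above.
# Only the control flow is meant to mirror the log output; the TaskInfo
# shape and fetch_task_info callable are assumptions for illustration.
import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str            # 'queued' | 'running' | 'success' | 'error'
    progress: int         # 0..100
    error: str | None = None

def wait_for_task(fetch_task_info, task_id, poll_interval=0.5):
    """Poll task_id until it finishes, logging progress like the DEBUG records."""
    started = time.monotonic()
    while True:
        info = fetch_task_info(task_id)
        if info.state in ('queued', 'running'):
            print(f"Task: {task_id} progress is {info.progress}%.")
            time.sleep(poll_interval)
            continue
        duration = time.monotonic() - started
        if info.state == 'success':
            print(f"Task: {task_id} duration_secs={duration:.6f} completed successfully.")
            return info
        raise RuntimeError(f"Task {task_id} failed: {info.error}")

# Example run with a fake task that finishes on the third poll:
_polls = iter([TaskInfo('running', 0), TaskInfo('running', 89), TaskInfo('success', 100)])
wait_for_task(lambda _tid: next(_polls), 'task-0000000', poll_interval=0.01)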
[ 2230.484179] env[62684]: DEBUG nova.compute.manager [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2230.486909] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0080903-b123-4a25-bc20-37cd05e3504a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.625857] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edaf6dbb-1693-4bab-9e6a-6c2e45152e81 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.633473] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e3bc007-e0f3-4a14-ba87-b944ee0a12af {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.663162] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f263e9d-6848-40ed-ac1e-cb4cb410db17 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.671723] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b27f3a6f-ebe1-4a7a-b2de-764d8abb7ad2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.684536] env[62684]: DEBUG nova.compute.provider_tree [None req-83de388b-5cbc-417e-bfb6-e7c4207a5031 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2230.736030] env[62684]: DEBUG nova.network.neutron [req-e5a2d115-49b9-43fd-b441-06bc3e0af28a req-d74ecf19-e124-4507-a2a8-4ab0951cc81f service nova] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Updated VIF entry in instance network info cache for port 91d5100c-0d94-42a3-a4f2-5055bd108b50. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2230.736303] env[62684]: DEBUG nova.network.neutron [req-e5a2d115-49b9-43fd-b441-06bc3e0af28a req-d74ecf19-e124-4507-a2a8-4ab0951cc81f service nova] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Updating instance_info_cache with network_info: [{"id": "91d5100c-0d94-42a3-a4f2-5055bd108b50", "address": "fa:16:3e:58:06:3e", "network": {"id": "e177c6d0-ddd5-4029-94af-c8f1b937dd9f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1344612161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27d04006afc747e19ad87238bfdbaad1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91d5100c-0d", "ovs_interfaceid": "91d5100c-0d94-42a3-a4f2-5055bd108b50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2230.781419] env[62684]: DEBUG oslo_vmware.api [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053623, 'name': PowerOffVM_Task, 'duration_secs': 0.199938} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2230.782065] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2230.782065] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2230.782326] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7f0bc38b-ea6f-4e9c-a38b-171491b7556c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.823070] env[62684]: DEBUG oslo_vmware.api [None req-54e4c030-e9fc-4721-8e7b-db621f7b1c52 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053624, 'name': ReconfigVM_Task, 'duration_secs': 0.270524} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2230.823377] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-54e4c030-e9fc-4721-8e7b-db621f7b1c52 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Reconfigured VM instance instance-0000005e to detach disk 2001 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2230.828245] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56574574-696f-4ab0-b26a-fa7ce060870a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.843048] env[62684]: DEBUG oslo_vmware.api [None req-54e4c030-e9fc-4721-8e7b-db621f7b1c52 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 2230.843048] env[62684]: value = "task-2053629" [ 2230.843048] env[62684]: _type = "Task" [ 2230.843048] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2230.851228] env[62684]: DEBUG oslo_vmware.api [None req-54e4c030-e9fc-4721-8e7b-db621f7b1c52 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053629, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2230.882243] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2230.882515] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2230.882691] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Deleting the datastore file [datastore2] daf1486b-d5c2-4341-8a27-36eeeb08cd26 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2230.882960] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6650166b-4ebd-4743-af26-9f1442536b9f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.888699] env[62684]: DEBUG oslo_vmware.api [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for the task: (returnval){ [ 2230.888699] env[62684]: value = "task-2053630" [ 2230.888699] env[62684]: _type = "Task" [ 2230.888699] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2230.898293] env[62684]: DEBUG oslo_vmware.api [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053630, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2230.931972] env[62684]: DEBUG oslo_concurrency.lockutils [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2230.956957] env[62684]: DEBUG oslo_vmware.api [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Task: {'id': task-2053626, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.190607} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2230.957251] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2230.957440] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2230.957647] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2230.957833] env[62684]: INFO nova.compute.manager [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Took 1.17 seconds to destroy the instance on the hypervisor. [ 2230.958119] env[62684]: DEBUG oslo.service.loopingcall [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2230.958349] env[62684]: DEBUG nova.compute.manager [-] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2230.958446] env[62684]: DEBUG nova.network.neutron [-] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2231.016071] env[62684]: INFO nova.compute.manager [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Took 12.65 seconds to build instance. [ 2231.190394] env[62684]: DEBUG nova.scheduler.client.report [None req-83de388b-5cbc-417e-bfb6-e7c4207a5031 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2231.238796] env[62684]: DEBUG oslo_concurrency.lockutils [req-e5a2d115-49b9-43fd-b441-06bc3e0af28a req-d74ecf19-e124-4507-a2a8-4ab0951cc81f service nova] Releasing lock "refresh_cache-587edf89-2ea0-4b89-8830-fa766b798398" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2231.352498] env[62684]: DEBUG oslo_vmware.api [None req-54e4c030-e9fc-4721-8e7b-db621f7b1c52 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053629, 'name': ReconfigVM_Task, 'duration_secs': 0.149329} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2231.352882] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-54e4c030-e9fc-4721-8e7b-db621f7b1c52 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421388', 'volume_id': '93dd9c99-f5f4-43f7-8461-d3e47a775078', 'name': 'volume-93dd9c99-f5f4-43f7-8461-d3e47a775078', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '584845d2-d146-42bf-8ef5-58532fe24f65', 'attached_at': '', 'detached_at': '', 'volume_id': '93dd9c99-f5f4-43f7-8461-d3e47a775078', 'serial': '93dd9c99-f5f4-43f7-8461-d3e47a775078'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2231.401053] env[62684]: DEBUG oslo_vmware.api [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Task: {'id': task-2053630, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176111} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2231.401499] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2231.401801] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2231.402107] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2231.402462] env[62684]: INFO nova.compute.manager [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Took 1.17 seconds to destroy the instance on the hypervisor. [ 2231.402828] env[62684]: DEBUG oslo.service.loopingcall [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2231.403127] env[62684]: DEBUG nova.compute.manager [-] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2231.403257] env[62684]: DEBUG nova.network.neutron [-] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2231.518221] env[62684]: DEBUG oslo_concurrency.lockutils [None req-4f5597bd-7d73-4ad8-b2ce-1303a0b1e8e1 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lock "9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.166s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2231.700549] env[62684]: DEBUG nova.network.neutron [-] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2231.795259] env[62684]: DEBUG nova.compute.manager [req-27492bf3-5379-4bba-bf86-37e03bdfe24e req-653dad2d-7431-4498-a921-dc6c54d914be service nova] [instance: 02480039-f749-402a-92db-df664304a5bf] Received event network-changed-3f61101e-05cd-4c60-ad9a-8a272d5e8879 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2231.795259] env[62684]: DEBUG nova.compute.manager [req-27492bf3-5379-4bba-bf86-37e03bdfe24e req-653dad2d-7431-4498-a921-dc6c54d914be service nova] [instance: 02480039-f749-402a-92db-df664304a5bf] Refreshing instance network info cache due to event network-changed-3f61101e-05cd-4c60-ad9a-8a272d5e8879. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2231.795259] env[62684]: DEBUG oslo_concurrency.lockutils [req-27492bf3-5379-4bba-bf86-37e03bdfe24e req-653dad2d-7431-4498-a921-dc6c54d914be service nova] Acquiring lock "refresh_cache-02480039-f749-402a-92db-df664304a5bf" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2231.795259] env[62684]: DEBUG oslo_concurrency.lockutils [req-27492bf3-5379-4bba-bf86-37e03bdfe24e req-653dad2d-7431-4498-a921-dc6c54d914be service nova] Acquired lock "refresh_cache-02480039-f749-402a-92db-df664304a5bf" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2231.795259] env[62684]: DEBUG nova.network.neutron [req-27492bf3-5379-4bba-bf86-37e03bdfe24e req-653dad2d-7431-4498-a921-dc6c54d914be service nova] [instance: 02480039-f749-402a-92db-df664304a5bf] Refreshing network info cache for port 3f61101e-05cd-4c60-ad9a-8a272d5e8879 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2231.894488] env[62684]: DEBUG nova.objects.instance [None req-54e4c030-e9fc-4721-8e7b-db621f7b1c52 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lazy-loading 'flavor' on Instance uuid 584845d2-d146-42bf-8ef5-58532fe24f65 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2231.975372] env[62684]: DEBUG oslo_concurrency.lockutils [None req-586fe5e5-a4da-48b6-b1d9-0dc182e474bf tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "02480039-f749-402a-92db-df664304a5bf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2232.204191] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83de388b-5cbc-417e-bfb6-e7c4207a5031 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.312s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2232.209018] env[62684]: DEBUG oslo_concurrency.lockutils [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.276s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2232.209018] env[62684]: DEBUG nova.objects.instance [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lazy-loading 'resources' on Instance uuid 02480039-f749-402a-92db-df664304a5bf {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2232.209018] env[62684]: INFO nova.compute.manager [-] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Took 1.25 seconds to deallocate network for instance. 
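The ReconfigVM_Task, DeleteDatastoreFile_Task and PowerOffVM_Task entries above all follow the same shape: the driver invokes a vCenter method that returns a task handle, then wait_for_task polls it (the "progress is N%" lines from _poll_task) until it reports success or failure. The sketch below is a minimal plain-Python illustration of that polling loop, not the oslo.vmware implementation; poll_task, the state names and the interval are assumptions made only for the example.

    # Illustrative sketch of the wait_for_task/_poll_task pattern seen above.
    # Not the oslo.vmware code; names, states and timings are assumptions.
    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(poll_task, interval=0.5, timeout=300):
        """Poll a vCenter-style task until it reaches a terminal state.

        poll_task() is assumed to return a dict such as
        {'state': 'running', 'progress': 5} or {'state': 'success'}.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = poll_task()
            state = info.get('state')
            if state == 'success':
                return info                      # e.g. task-2053629 completed
            if state in ('error', 'cancelled'):
                raise TaskFailed(info.get('error', 'task failed'))
            # corresponds to the "progress is N%" lines emitted while polling
            print("progress is %s%%" % info.get('progress', 0))
            time.sleep(interval)
        raise TaskFailed('timed out waiting for task')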
[ 2232.510318] env[62684]: DEBUG nova.network.neutron [req-27492bf3-5379-4bba-bf86-37e03bdfe24e req-653dad2d-7431-4498-a921-dc6c54d914be service nova] [instance: 02480039-f749-402a-92db-df664304a5bf] Updated VIF entry in instance network info cache for port 3f61101e-05cd-4c60-ad9a-8a272d5e8879. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2232.510662] env[62684]: DEBUG nova.network.neutron [req-27492bf3-5379-4bba-bf86-37e03bdfe24e req-653dad2d-7431-4498-a921-dc6c54d914be service nova] [instance: 02480039-f749-402a-92db-df664304a5bf] Updating instance_info_cache with network_info: [{"id": "3f61101e-05cd-4c60-ad9a-8a272d5e8879", "address": "fa:16:3e:f8:fd:88", "network": {"id": "b24dd0c0-a394-4ca6-a79a-94535bc1df6f", "bridge": null, "label": "tempest-DeleteServersTestJSON-2023102141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "607a0aa1049640d882d7dd490f5f98ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap3f61101e-05", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2232.570895] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquiring lock "0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2232.571161] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lock "0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2232.572204] env[62684]: DEBUG nova.network.neutron [-] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2232.713106] env[62684]: DEBUG nova.objects.instance [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lazy-loading 'numa_topology' on Instance uuid 02480039-f749-402a-92db-df664304a5bf {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2232.716598] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2232.766559] env[62684]: INFO nova.scheduler.client.report [None req-83de388b-5cbc-417e-bfb6-e7c4207a5031 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Deleted allocation for migration 3ac08fc8-b87f-4785-ac54-acacbaf5dfc4 [ 2232.853124] env[62684]: DEBUG oslo_concurrency.lockutils [None req-73fd32fe-3ce2-4f4e-b3ea-f3e77d469ac3 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "584845d2-d146-42bf-8ef5-58532fe24f65" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2232.903886] env[62684]: DEBUG oslo_concurrency.lockutils [None req-54e4c030-e9fc-4721-8e7b-db621f7b1c52 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "584845d2-d146-42bf-8ef5-58532fe24f65" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.248s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2232.905135] env[62684]: DEBUG oslo_concurrency.lockutils [None req-73fd32fe-3ce2-4f4e-b3ea-f3e77d469ac3 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "584845d2-d146-42bf-8ef5-58532fe24f65" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.052s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2233.013933] env[62684]: DEBUG oslo_concurrency.lockutils [req-27492bf3-5379-4bba-bf86-37e03bdfe24e req-653dad2d-7431-4498-a921-dc6c54d914be service nova] Releasing lock "refresh_cache-02480039-f749-402a-92db-df664304a5bf" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2233.014262] env[62684]: DEBUG nova.compute.manager [req-27492bf3-5379-4bba-bf86-37e03bdfe24e req-653dad2d-7431-4498-a921-dc6c54d914be service nova] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Received event network-vif-deleted-b5747949-00d7-4815-9080-52285a6a8813 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2233.073941] env[62684]: DEBUG nova.compute.manager [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2233.077095] env[62684]: INFO nova.compute.manager [-] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Took 1.67 seconds to deallocate network for instance. 
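The recurring "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" triples come from oslo.concurrency's lock helpers wrapping named critical sections (compute_resources, the per-instance UUID locks, and the "-events" and "refresh_cache-" variants). A small sketch of that usage follows, using oslo.concurrency's public synchronized/lock helpers; the functions themselves are simplified stand-ins for illustration, not Nova's code.

    # Sketch of the lock pattern behind the "Acquiring lock ... /
    # acquired ... waited Ns / released ... held Ns" log lines.
    # The helpers are real oslo.concurrency APIs; the function bodies
    # are placeholders, not Nova's resource tracker or compute manager.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage(instance_uuid):
        # Runs with the "compute_resources" semaphore held; the library's
        # wrapper logs how long the caller waited and how long it held it.
        pass

    def do_terminate_instance(instance_uuid):
        # Equivalent context-manager form, as used for the per-instance
        # locks ("<uuid>", "<uuid>-events") seen throughout this section.
        with lockutils.lock(instance_uuid):
            with lockutils.lock(instance_uuid + '-events'):
                pass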
[ 2233.167028] env[62684]: DEBUG oslo_concurrency.lockutils [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "264c6900-dbef-455e-95cc-1df73c735cc8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2233.217959] env[62684]: DEBUG nova.objects.base [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Object Instance<02480039-f749-402a-92db-df664304a5bf> lazy-loaded attributes: resources,numa_topology {{(pid=62684) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2233.272685] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83de388b-5cbc-417e-bfb6-e7c4207a5031 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "264c6900-dbef-455e-95cc-1df73c735cc8" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 9.716s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2233.274588] env[62684]: DEBUG oslo_concurrency.lockutils [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "264c6900-dbef-455e-95cc-1df73c735cc8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.107s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2233.274916] env[62684]: DEBUG oslo_concurrency.lockutils [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "264c6900-dbef-455e-95cc-1df73c735cc8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2233.275161] env[62684]: DEBUG oslo_concurrency.lockutils [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "264c6900-dbef-455e-95cc-1df73c735cc8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2233.275345] env[62684]: DEBUG oslo_concurrency.lockutils [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "264c6900-dbef-455e-95cc-1df73c735cc8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2233.277445] env[62684]: INFO nova.compute.manager [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Terminating instance [ 2233.279254] env[62684]: DEBUG nova.compute.manager [None req-02356f11-6b01-4092-a365-32307288b882 
tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2233.279515] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2233.280435] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05b9b11d-02a2-47b6-a410-2a41539fbdaf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.291515] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2233.291760] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3edaa3fe-bc9f-4311-aea5-f2a59b99ee11 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.297235] env[62684]: DEBUG oslo_vmware.api [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2233.297235] env[62684]: value = "task-2053631" [ 2233.297235] env[62684]: _type = "Task" [ 2233.297235] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2233.306892] env[62684]: DEBUG oslo_vmware.api [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053631, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2233.392733] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e26795ba-d665-4b07-bfa5-f1e41f56b088 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.400707] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bfa8464-e70a-40f6-aa96-ff8f7b1a71e6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.430829] env[62684]: INFO nova.compute.manager [None req-73fd32fe-3ce2-4f4e-b3ea-f3e77d469ac3 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Detaching volume cba4160c-2fba-4b43-b9ec-1bef4fb246a1 [ 2233.433798] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d333ed21-c3e1-4657-8532-4b32ad1f0a0a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.443037] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-869c76d0-67ed-49f5-a2bb-581024f7f9d2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.456667] env[62684]: DEBUG nova.compute.provider_tree [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2233.462093] env[62684]: INFO nova.virt.block_device [None req-73fd32fe-3ce2-4f4e-b3ea-f3e77d469ac3 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Attempting to driver detach volume cba4160c-2fba-4b43-b9ec-1bef4fb246a1 from mountpoint /dev/sdc [ 2233.462352] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-73fd32fe-3ce2-4f4e-b3ea-f3e77d469ac3 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Volume detach. 
Driver type: vmdk {{(pid=62684) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2233.462552] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-73fd32fe-3ce2-4f4e-b3ea-f3e77d469ac3 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421390', 'volume_id': 'cba4160c-2fba-4b43-b9ec-1bef4fb246a1', 'name': 'volume-cba4160c-2fba-4b43-b9ec-1bef4fb246a1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '584845d2-d146-42bf-8ef5-58532fe24f65', 'attached_at': '', 'detached_at': '', 'volume_id': 'cba4160c-2fba-4b43-b9ec-1bef4fb246a1', 'serial': 'cba4160c-2fba-4b43-b9ec-1bef4fb246a1'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2233.463399] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4cd7860-40d5-42e3-b800-424b575fa736 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.484353] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef20c529-ab7c-4591-8ea9-3d7310b77ee5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.491280] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9762187-8ea5-4002-b4ba-506929a74a13 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.511943] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c7aef9f-ab16-44d5-baa8-1af2bd1881ac {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.528634] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-73fd32fe-3ce2-4f4e-b3ea-f3e77d469ac3 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] The volume has not been displaced from its original location: [datastore2] volume-cba4160c-2fba-4b43-b9ec-1bef4fb246a1/volume-cba4160c-2fba-4b43-b9ec-1bef4fb246a1.vmdk. No consolidation needed. 
{{(pid=62684) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2233.533960] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-73fd32fe-3ce2-4f4e-b3ea-f3e77d469ac3 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Reconfiguring VM instance instance-0000005e to detach disk 2002 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2233.534337] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90fc3d28-c282-4c57-9069-0a50a73a9ab5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.551635] env[62684]: DEBUG oslo_vmware.api [None req-73fd32fe-3ce2-4f4e-b3ea-f3e77d469ac3 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 2233.551635] env[62684]: value = "task-2053632" [ 2233.551635] env[62684]: _type = "Task" [ 2233.551635] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2233.559750] env[62684]: DEBUG oslo_vmware.api [None req-73fd32fe-3ce2-4f4e-b3ea-f3e77d469ac3 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053632, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2233.583885] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2233.597424] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2233.632067] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "68ed9549-14ab-4f90-bd78-925f289dc029" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2233.632337] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "68ed9549-14ab-4f90-bd78-925f289dc029" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2233.806568] env[62684]: DEBUG oslo_vmware.api [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 
tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053631, 'name': PowerOffVM_Task, 'duration_secs': 0.215416} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2233.806854] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2233.807039] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2233.807317] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a74acc60-6784-4221-81bd-a9c256c38fc2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.820728] env[62684]: DEBUG nova.compute.manager [req-ab054edb-1554-4a1e-b2d7-23086966d197 req-ce924c5c-95a3-4a24-9779-681cebcd9c13 service nova] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Received event network-vif-deleted-da310d7c-cd12-49ca-8014-efa9469aef45 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2233.960332] env[62684]: DEBUG nova.scheduler.client.report [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2234.062115] env[62684]: DEBUG oslo_vmware.api [None req-73fd32fe-3ce2-4f4e-b3ea-f3e77d469ac3 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053632, 'name': ReconfigVM_Task, 'duration_secs': 0.207469} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2234.062381] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-73fd32fe-3ce2-4f4e-b3ea-f3e77d469ac3 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Reconfigured VM instance instance-0000005e to detach disk 2002 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2234.066927] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81eddae5-0982-4cfb-8d1d-a84360eba522 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.081941] env[62684]: DEBUG oslo_vmware.api [None req-73fd32fe-3ce2-4f4e-b3ea-f3e77d469ac3 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 2234.081941] env[62684]: value = "task-2053634" [ 2234.081941] env[62684]: _type = "Task" [ 2234.081941] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2234.089541] env[62684]: DEBUG oslo_vmware.api [None req-73fd32fe-3ce2-4f4e-b3ea-f3e77d469ac3 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053634, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2234.134824] env[62684]: DEBUG nova.compute.manager [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2234.464982] env[62684]: DEBUG oslo_concurrency.lockutils [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.257s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2234.467605] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.751s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2234.467850] env[62684]: DEBUG nova.objects.instance [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lazy-loading 'resources' on Instance uuid f037d6b2-2082-4611-985e-b9a077eb8250 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2234.591617] env[62684]: DEBUG oslo_vmware.api [None req-73fd32fe-3ce2-4f4e-b3ea-f3e77d469ac3 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053634, 'name': ReconfigVM_Task, 'duration_secs': 0.14912} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2234.591808] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-73fd32fe-3ce2-4f4e-b3ea-f3e77d469ac3 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421390', 'volume_id': 'cba4160c-2fba-4b43-b9ec-1bef4fb246a1', 'name': 'volume-cba4160c-2fba-4b43-b9ec-1bef4fb246a1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '584845d2-d146-42bf-8ef5-58532fe24f65', 'attached_at': '', 'detached_at': '', 'volume_id': 'cba4160c-2fba-4b43-b9ec-1bef4fb246a1', 'serial': 'cba4160c-2fba-4b43-b9ec-1bef4fb246a1'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2234.656940] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2234.976159] env[62684]: DEBUG oslo_concurrency.lockutils [None req-96aee448-a0d3-4dd8-82c3-d5d2bf322bff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "02480039-f749-402a-92db-df664304a5bf" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 26.524s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2234.977323] env[62684]: DEBUG oslo_concurrency.lockutils [None req-586fe5e5-a4da-48b6-b1d9-0dc182e474bf tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "02480039-f749-402a-92db-df664304a5bf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 3.004s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2234.977607] env[62684]: DEBUG oslo_concurrency.lockutils [None req-586fe5e5-a4da-48b6-b1d9-0dc182e474bf tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "02480039-f749-402a-92db-df664304a5bf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2234.977774] env[62684]: DEBUG oslo_concurrency.lockutils [None req-586fe5e5-a4da-48b6-b1d9-0dc182e474bf tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "02480039-f749-402a-92db-df664304a5bf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2234.978361] env[62684]: DEBUG oslo_concurrency.lockutils [None req-586fe5e5-a4da-48b6-b1d9-0dc182e474bf tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "02480039-f749-402a-92db-df664304a5bf-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2234.979672] env[62684]: INFO nova.compute.manager [None req-586fe5e5-a4da-48b6-b1d9-0dc182e474bf tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Terminating instance [ 2234.981668] env[62684]: DEBUG nova.compute.manager [None req-586fe5e5-a4da-48b6-b1d9-0dc182e474bf tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2234.982043] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-586fe5e5-a4da-48b6-b1d9-0dc182e474bf tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2234.982357] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-877f8d5c-549e-4fb9-a987-f35e9e825d2a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.992541] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1b4713c-bee3-4b37-ab26-a9bb2d55f38f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.026066] env[62684]: WARNING nova.virt.vmwareapi.vmops [None req-586fe5e5-a4da-48b6-b1d9-0dc182e474bf tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 02480039-f749-402a-92db-df664304a5bf could not be found. [ 2235.026324] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-586fe5e5-a4da-48b6-b1d9-0dc182e474bf tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2235.026519] env[62684]: INFO nova.compute.manager [None req-586fe5e5-a4da-48b6-b1d9-0dc182e474bf tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 02480039-f749-402a-92db-df664304a5bf] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2235.026816] env[62684]: DEBUG oslo.service.loopingcall [None req-586fe5e5-a4da-48b6-b1d9-0dc182e474bf tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2235.030654] env[62684]: DEBUG nova.compute.manager [-] [instance: 02480039-f749-402a-92db-df664304a5bf] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2235.030785] env[62684]: DEBUG nova.network.neutron [-] [instance: 02480039-f749-402a-92db-df664304a5bf] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2235.133181] env[62684]: DEBUG nova.objects.instance [None req-73fd32fe-3ce2-4f4e-b3ea-f3e77d469ac3 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lazy-loading 'flavor' on Instance uuid 584845d2-d146-42bf-8ef5-58532fe24f65 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2235.187177] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb7f242f-f352-407a-a048-c36425f74fda {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.195520] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dabd9447-b649-4036-854f-e35f56f6483c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.226234] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef7a7b06-f016-4a24-8449-499ac91c07ca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.234930] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37c60126-1c6e-4004-a35a-4f7f2b715310 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.248803] env[62684]: DEBUG nova.compute.provider_tree [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2235.536775] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "ca3d1a73-6f3b-4278-8fe7-03b66f407ba6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2235.537168] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "ca3d1a73-6f3b-4278-8fe7-03b66f407ba6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2235.537447] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock 
"ca3d1a73-6f3b-4278-8fe7-03b66f407ba6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2235.537650] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "ca3d1a73-6f3b-4278-8fe7-03b66f407ba6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2235.537873] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "ca3d1a73-6f3b-4278-8fe7-03b66f407ba6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2235.540296] env[62684]: INFO nova.compute.manager [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Terminating instance [ 2235.542303] env[62684]: DEBUG nova.compute.manager [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2235.542511] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2235.543360] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f6218f9-400c-4119-8b72-c989dd0fb785 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.551345] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2235.551607] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cae6da44-16cf-4e9c-b760-f3defbfc3d80 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.557818] env[62684]: DEBUG oslo_vmware.api [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 2235.557818] env[62684]: value = "task-2053635" [ 2235.557818] env[62684]: _type = "Task" [ 2235.557818] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2235.565275] env[62684]: DEBUG oslo_vmware.api [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053635, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2235.752802] env[62684]: DEBUG nova.scheduler.client.report [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2235.814691] env[62684]: DEBUG nova.network.neutron [-] [instance: 02480039-f749-402a-92db-df664304a5bf] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2236.068731] env[62684]: DEBUG oslo_vmware.api [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053635, 'name': PowerOffVM_Task, 'duration_secs': 0.26241} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2236.069031] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2236.069217] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2236.069514] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a098bcfb-8c64-4a71-9da9-12d8b0229cdf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.140706] env[62684]: DEBUG oslo_concurrency.lockutils [None req-73fd32fe-3ce2-4f4e-b3ea-f3e77d469ac3 tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "584845d2-d146-42bf-8ef5-58532fe24f65" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.235s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2236.259029] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.791s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2236.261483] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.678s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2236.261933] env[62684]: DEBUG nova.objects.instance [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lazy-loading 'resources' on Instance uuid daf1486b-d5c2-4341-8a27-36eeeb08cd26 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2236.282268] env[62684]: INFO nova.scheduler.client.report [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Deleted allocations for instance f037d6b2-2082-4611-985e-b9a077eb8250 [ 2236.317955] env[62684]: INFO nova.compute.manager [-] [instance: 02480039-f749-402a-92db-df664304a5bf] Took 1.29 seconds to deallocate network for instance. 
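The "Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f" entries repeat the compute node's full inventory. Assuming the usual placement capacity formula, capacity = (total - reserved) * allocation_ratio (with max_unit and step_size further bounding each individual allocation), the usable totals behind the inventory shown in the log work out as in this short snippet; the formula is reproduced here only as a reading aid.

    # Effective capacity per resource class for the inventory logged above,
    # assuming the standard placement formula:
    #   capacity = (total - reserved) * allocation_ratio
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 155},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        # VCPU -> 192.0, MEMORY_MB -> 196078.0, DISK_GB -> 400.0;
        # any single allocation is additionally capped by max_unit.
        print(rc, capacity)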
[ 2236.789077] env[62684]: DEBUG oslo_concurrency.lockutils [None req-dc5b920b-5678-438e-8f86-bd9ef5c52e4b tempest-AttachInterfacesTestJSON-207820228 tempest-AttachInterfacesTestJSON-207820228-project-member] Lock "f037d6b2-2082-4611-985e-b9a077eb8250" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.004s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2236.927829] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e82b706-12db-4abd-a37f-2579635f528e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.938617] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce620083-b64e-405b-a6f5-bed6a01aa8e6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.973021] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32690c0a-52b6-4837-817f-c66d5aaabe96 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.979519] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a60a928-62a3-4389-bb03-a35b07b48483 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.993489] env[62684]: DEBUG nova.compute.provider_tree [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2237.264450] env[62684]: DEBUG oslo_concurrency.lockutils [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "584845d2-d146-42bf-8ef5-58532fe24f65" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2237.264697] env[62684]: DEBUG oslo_concurrency.lockutils [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "584845d2-d146-42bf-8ef5-58532fe24f65" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2237.264934] env[62684]: DEBUG oslo_concurrency.lockutils [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "584845d2-d146-42bf-8ef5-58532fe24f65-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2237.266267] env[62684]: DEBUG oslo_concurrency.lockutils [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock 
"584845d2-d146-42bf-8ef5-58532fe24f65-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2237.266538] env[62684]: DEBUG oslo_concurrency.lockutils [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "584845d2-d146-42bf-8ef5-58532fe24f65-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2237.269301] env[62684]: INFO nova.compute.manager [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Terminating instance [ 2237.271425] env[62684]: DEBUG nova.compute.manager [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2237.271581] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2237.272457] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fd32044-ab3e-4c0d-90fe-10bbbe98c838 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.281333] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2237.281955] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8809f0d2-8517-4180-b7b9-649f6d454efd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.288867] env[62684]: DEBUG oslo_vmware.api [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 2237.288867] env[62684]: value = "task-2053637" [ 2237.288867] env[62684]: _type = "Task" [ 2237.288867] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2237.300874] env[62684]: DEBUG oslo_vmware.api [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053637, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2237.346025] env[62684]: DEBUG oslo_concurrency.lockutils [None req-586fe5e5-a4da-48b6-b1d9-0dc182e474bf tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "02480039-f749-402a-92db-df664304a5bf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.369s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2237.496269] env[62684]: DEBUG nova.scheduler.client.report [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2237.799376] env[62684]: DEBUG oslo_vmware.api [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053637, 'name': PowerOffVM_Task, 'duration_secs': 0.20348} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2237.799759] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2237.799939] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2237.800261] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca61a890-cc82-4cb7-a23d-310faa00e67a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.946130] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "c87b2875-ae05-4091-93fe-7b33d4ca864b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2237.946130] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "c87b2875-ae05-4091-93fe-7b33d4ca864b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2238.004021] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.740s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2238.004021] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.407s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2238.006205] env[62684]: INFO nova.compute.claims [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2238.031548] env[62684]: INFO nova.scheduler.client.report [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Deleted allocations for instance daf1486b-d5c2-4341-8a27-36eeeb08cd26 [ 2238.450686] env[62684]: DEBUG nova.compute.manager [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2238.538916] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2de8c398-60ff-42b1-b591-8e4a3c0eb487 tempest-AttachVolumeShelveTestJSON-1531474823 tempest-AttachVolumeShelveTestJSON-1531474823-project-member] Lock "daf1486b-d5c2-4341-8a27-36eeeb08cd26" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.315s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2238.973338] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2239.187292] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e503d542-f348-4091-b08f-eea57e749262 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.194742] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6f2261d-7c74-42c0-83da-dae2250f1341 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.226014] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f508c77b-124d-47b3-a449-1142200f5800 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.234426] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a13db8b3-239f-4fc4-97f6-a976d7d67025 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.251257] env[62684]: DEBUG nova.compute.provider_tree [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2239.755517] env[62684]: DEBUG nova.scheduler.client.report [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2239.854699] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Unregistered the VM {{(pid=62684) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2239.854958] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2239.855175] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Deleting the datastore file [datastore2] ca3d1a73-6f3b-4278-8fe7-03b66f407ba6 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2239.855484] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d3d001d7-1850-47a5-88f6-7c162355cb7e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.864382] env[62684]: DEBUG oslo_vmware.api [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 2239.864382] env[62684]: value = "task-2053639" [ 2239.864382] env[62684]: _type = "Task" [ 2239.864382] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2239.871272] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2239.871509] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2239.871819] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Deleting the datastore file [datastore2] 584845d2-d146-42bf-8ef5-58532fe24f65 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2239.872041] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-07f7cc15-2ae7-4f1f-bd05-84c04cb679aa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.878178] env[62684]: DEBUG oslo_vmware.api [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053639, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2239.881087] env[62684]: DEBUG oslo_vmware.api [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for the task: (returnval){ [ 2239.881087] env[62684]: value = "task-2053640" [ 2239.881087] env[62684]: _type = "Task" [ 2239.881087] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2239.889341] env[62684]: DEBUG oslo_vmware.api [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053640, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2240.204687] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2240.205075] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2240.205346] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Deleting the datastore file [datastore1] 264c6900-dbef-455e-95cc-1df73c735cc8 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2240.205721] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f8380888-ca30-4ef1-b642-446b5be3b3dc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2240.213318] env[62684]: DEBUG oslo_vmware.api [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2240.213318] env[62684]: value = "task-2053642" [ 2240.213318] env[62684]: _type = "Task" [ 2240.213318] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2240.222340] env[62684]: DEBUG oslo_vmware.api [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053642, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2240.259530] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.255s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2240.260158] env[62684]: DEBUG nova.compute.manager [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2240.263188] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.606s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2240.264982] env[62684]: INFO nova.compute.claims [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2240.375033] env[62684]: DEBUG oslo_vmware.api [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053639, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.209713} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2240.375318] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2240.375510] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2240.375696] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2240.375874] env[62684]: INFO nova.compute.manager [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Took 4.83 seconds to destroy the instance on the hypervisor. 
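After unregistering, the driver deletes the instance directory from the datastore (the FileManager.DeleteDatastoreFile_Task entries for task-2053639 and task-2053640 above) and only then logs "Instance destroyed" and moves on to network deallocation. A hedged sketch of that cleanup step, reusing the VMwareAPISession from the earlier sketch; dc_ref is a placeholder Datacenter reference, and the datastore path is the one shown in the log.

    # Sketch of the datastore cleanup: delete the instance folder via the
    # vCenter FileManager, then wait for the returned task to complete.
    # `session` is the VMwareAPISession from the previous sketch;
    # dc_ref is a hypothetical Datacenter managed-object reference.
    dc_ref = ...
    ds_path = '[datastore2] ca3d1a73-6f3b-4278-8fe7-03b66f407ba6'

    delete_task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task',
        session.vim.service_content.fileManager,
        name=ds_path, datacenter=dc_ref)
    session.wait_for_task(delete_task)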
[ 2240.376131] env[62684]: DEBUG oslo.service.loopingcall [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2240.376338] env[62684]: DEBUG nova.compute.manager [-] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2240.376434] env[62684]: DEBUG nova.network.neutron [-] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2240.392328] env[62684]: DEBUG oslo_vmware.api [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Task: {'id': task-2053640, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14672} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2240.392457] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2240.392672] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2240.392840] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2240.394051] env[62684]: INFO nova.compute.manager [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Took 3.12 seconds to destroy the instance on the hypervisor. [ 2240.394051] env[62684]: DEBUG oslo.service.loopingcall [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2240.394051] env[62684]: DEBUG nova.compute.manager [-] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2240.394051] env[62684]: DEBUG nova.network.neutron [-] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2240.724176] env[62684]: DEBUG oslo_vmware.api [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053642, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.195651} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2240.724458] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2240.724853] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2240.724853] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2240.725018] env[62684]: INFO nova.compute.manager [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Took 7.45 seconds to destroy the instance on the hypervisor. [ 2240.725274] env[62684]: DEBUG oslo.service.loopingcall [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2240.725480] env[62684]: DEBUG nova.compute.manager [-] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2240.725578] env[62684]: DEBUG nova.network.neutron [-] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2240.771671] env[62684]: DEBUG nova.compute.utils [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2240.774631] env[62684]: DEBUG nova.compute.manager [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2240.777364] env[62684]: DEBUG nova.network.neutron [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2240.825240] env[62684]: DEBUG nova.compute.manager [req-32527ab4-ad17-450c-8697-f0ebfbf92236 req-432c531e-809a-46c2-9bf2-892b70b2ec83 service nova] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Received event network-vif-deleted-1ebe50b2-d6ab-48aa-b581-d2d09b588552 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2240.825466] env[62684]: INFO nova.compute.manager [req-32527ab4-ad17-450c-8697-f0ebfbf92236 req-432c531e-809a-46c2-9bf2-892b70b2ec83 service nova] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Neutron deleted interface 1ebe50b2-d6ab-48aa-b581-d2d09b588552; detaching it from the instance and deleting it from the info cache [ 2240.825647] env[62684]: DEBUG nova.network.neutron [req-32527ab4-ad17-450c-8697-f0ebfbf92236 req-432c531e-809a-46c2-9bf2-892b70b2ec83 service nova] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2240.861980] env[62684]: DEBUG nova.policy [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f2cd375ad4a34027b34407d99993b084', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f0e0f0e1dc834134913bd742fa99b52f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2241.190917] env[62684]: DEBUG nova.compute.manager [req-80447e7f-dff2-4776-9e70-61a6659c876a req-f33b1ab3-5f10-45c4-9515-f5f9f9dafd42 service nova] [instance: 
264c6900-dbef-455e-95cc-1df73c735cc8] Received event network-vif-deleted-34ae3a7e-83a9-4ebd-8582-bb73f3050948 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2241.190917] env[62684]: INFO nova.compute.manager [req-80447e7f-dff2-4776-9e70-61a6659c876a req-f33b1ab3-5f10-45c4-9515-f5f9f9dafd42 service nova] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Neutron deleted interface 34ae3a7e-83a9-4ebd-8582-bb73f3050948; detaching it from the instance and deleting it from the info cache [ 2241.190917] env[62684]: DEBUG nova.network.neutron [req-80447e7f-dff2-4776-9e70-61a6659c876a req-f33b1ab3-5f10-45c4-9515-f5f9f9dafd42 service nova] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2241.270869] env[62684]: DEBUG nova.network.neutron [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Successfully created port: eea0a099-12ed-4dc1-a3b4-01695e9e14e9 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2241.274985] env[62684]: DEBUG nova.compute.manager [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2241.302714] env[62684]: DEBUG nova.network.neutron [-] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2241.329569] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6c90871b-d05c-44b5-83c9-3ac9580ac232 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.350691] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee19f64-8143-4d8e-8235-e2498c09c33f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.385725] env[62684]: DEBUG nova.compute.manager [req-32527ab4-ad17-450c-8697-f0ebfbf92236 req-432c531e-809a-46c2-9bf2-892b70b2ec83 service nova] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Detach interface failed, port_id=1ebe50b2-d6ab-48aa-b581-d2d09b588552, reason: Instance ca3d1a73-6f3b-4278-8fe7-03b66f407ba6 could not be found. 
{{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2241.469564] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f80e7af-f3d1-495b-a453-5659eca33782 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.477865] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc6134a7-f48f-4218-a367-aa27c77888d7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.480933] env[62684]: DEBUG nova.network.neutron [-] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2241.512800] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e521307-5061-4879-9118-47ba3ca9c0c9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.522487] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1261cee4-7b14-4354-a650-51d750d04594 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.538135] env[62684]: DEBUG nova.compute.provider_tree [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2241.660340] env[62684]: DEBUG nova.network.neutron [-] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2241.693278] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-92afd76c-8a83-4535-bca4-b0defc8af4c4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.706143] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff147b89-f8bc-48f3-a1a5-3b5ea61d1abc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.746675] env[62684]: DEBUG nova.compute.manager [req-80447e7f-dff2-4776-9e70-61a6659c876a req-f33b1ab3-5f10-45c4-9515-f5f9f9dafd42 service nova] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Detach interface failed, port_id=34ae3a7e-83a9-4ebd-8582-bb73f3050948, reason: Instance 264c6900-dbef-455e-95cc-1df73c735cc8 could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2241.807992] env[62684]: INFO nova.compute.manager [-] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Took 1.43 seconds to deallocate network for instance. [ 2241.983748] env[62684]: INFO nova.compute.manager [-] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Took 1.59 seconds to deallocate network for instance. 
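The resource-tracker entries keep reporting the same inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f: 48 VCPU at allocation_ratio 4.0, 196590 MB of memory with 512 reserved, and 400 GB of disk with max_unit 155. Placement derives schedulable capacity as (total - reserved) * allocation_ratio, while max_unit caps what any single allocation may request. A small worked example over the logged numbers:

    # Worked example: capacity implied by the inventory dict in the log.
    # Placement computes capacity = (total - reserved) * allocation_ratio;
    # max_unit bounds any single allocation.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,    'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 155,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g} schedulable, at most {inv['max_unit']} per allocation")
    # VCPU: 192 schedulable, at most 16 per allocation
    # MEMORY_MB: 196078 schedulable, at most 65530 per allocation
    # DISK_GB: 400 schedulable, at most 155 per allocation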
[ 2242.041704] env[62684]: DEBUG nova.scheduler.client.report [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2242.165172] env[62684]: INFO nova.compute.manager [-] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Took 1.44 seconds to deallocate network for instance. [ 2242.284544] env[62684]: DEBUG nova.compute.manager [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2242.309626] env[62684]: DEBUG nova.virt.hardware [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2242.310256] env[62684]: DEBUG nova.virt.hardware [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2242.310500] env[62684]: DEBUG nova.virt.hardware [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2242.310744] env[62684]: DEBUG nova.virt.hardware [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2242.310941] env[62684]: DEBUG nova.virt.hardware [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Image pref 0:0:0 {{(pid=62684) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2242.311137] env[62684]: DEBUG nova.virt.hardware [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2242.311387] env[62684]: DEBUG nova.virt.hardware [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2242.311560] env[62684]: DEBUG nova.virt.hardware [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2242.311753] env[62684]: DEBUG nova.virt.hardware [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2242.311902] env[62684]: DEBUG nova.virt.hardware [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2242.312096] env[62684]: DEBUG nova.virt.hardware [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2242.312967] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8772e90-18e9-467a-ada0-aa45139e2814 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.316690] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2242.321365] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e0469f4-b29a-4dad-97fe-79625498df98 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.397774] env[62684]: INFO nova.compute.manager [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Rebuilding instance [ 2242.440567] env[62684]: DEBUG nova.compute.manager [None 
req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2242.441508] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce6e91de-cc03-434d-81fe-6654dfe586fc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.492198] env[62684]: DEBUG oslo_concurrency.lockutils [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2242.545928] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.283s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2242.546461] env[62684]: DEBUG nova.compute.manager [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2242.549093] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.576s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2242.550481] env[62684]: INFO nova.compute.claims [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2242.672392] env[62684]: DEBUG oslo_concurrency.lockutils [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2242.857129] env[62684]: DEBUG nova.compute.manager [req-7eb1d536-e93b-4186-a9eb-7580ec136cc5 req-9d815d6c-ac66-443f-8efa-703c5f2f735d service nova] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Received event network-vif-deleted-4424c4bf-2ffd-4b4a-89f5-b6e9c9faa84b {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2242.952593] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Powering 
off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2242.952927] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-65a0bf59-5bd2-4d76-88b4-8e7d15d4a559 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.961315] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2242.961315] env[62684]: value = "task-2053643" [ 2242.961315] env[62684]: _type = "Task" [ 2242.961315] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2242.971205] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053643, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2243.053997] env[62684]: DEBUG nova.network.neutron [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Successfully updated port: eea0a099-12ed-4dc1-a3b4-01695e9e14e9 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2243.054772] env[62684]: DEBUG nova.compute.utils [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2243.058289] env[62684]: DEBUG nova.compute.manager [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2243.058468] env[62684]: DEBUG nova.network.neutron [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2243.138829] env[62684]: DEBUG nova.policy [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a3800d71923848db8635de9a8a2ff9f6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '76d88ac878d44480b3b54b24ab87efa9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2243.219993] env[62684]: DEBUG nova.compute.manager [req-1fa5de00-3a9f-4e28-b6ec-94156b5f0597 req-622ab73c-7167-438d-8497-7b39eaf3909d service nova] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Received event network-vif-plugged-eea0a099-12ed-4dc1-a3b4-01695e9e14e9 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2243.220224] env[62684]: DEBUG oslo_concurrency.lockutils [req-1fa5de00-3a9f-4e28-b6ec-94156b5f0597 req-622ab73c-7167-438d-8497-7b39eaf3909d service nova] Acquiring lock "0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2243.220439] env[62684]: DEBUG oslo_concurrency.lockutils [req-1fa5de00-3a9f-4e28-b6ec-94156b5f0597 req-622ab73c-7167-438d-8497-7b39eaf3909d service nova] Lock "0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2243.220728] env[62684]: DEBUG oslo_concurrency.lockutils [req-1fa5de00-3a9f-4e28-b6ec-94156b5f0597 req-622ab73c-7167-438d-8497-7b39eaf3909d service nova] Lock "0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2243.220932] env[62684]: DEBUG nova.compute.manager [req-1fa5de00-3a9f-4e28-b6ec-94156b5f0597 req-622ab73c-7167-438d-8497-7b39eaf3909d service nova] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] No waiting events found dispatching network-vif-plugged-eea0a099-12ed-4dc1-a3b4-01695e9e14e9 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2243.221125] env[62684]: WARNING nova.compute.manager [req-1fa5de00-3a9f-4e28-b6ec-94156b5f0597 req-622ab73c-7167-438d-8497-7b39eaf3909d service nova] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Received unexpected event network-vif-plugged-eea0a099-12ed-4dc1-a3b4-01695e9e14e9 for instance with vm_state building and task_state spawning. 
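The nova.virt.hardware entries earlier in this span walk through CPU topology selection for the 1-vCPU m1.nano flavor: with no flavor or image topology limits set, the maxima default to 65536 per dimension, so the only valid split of one vCPU is sockets=1, cores=1, threads=1, which is exactly the single VirtCPUTopology the log reports. The snippet below is a simplified, hypothetical illustration of that kind of enumeration, not nova.virt.hardware's actual implementation.

    # Simplified illustration of the topology enumeration described in the
    # log: list every (sockets, cores, threads) split of `vcpus` that
    # respects the per-dimension maxima. Hypothetical helper for clarity.
    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topologies = []
        for sockets, cores, threads in product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                topologies.append((sockets, cores, threads))
        return topologies

    print(possible_topologies(1))   # [(1, 1, 1)] -- matches the logged result
    print(possible_topologies(4))   # includes (1, 4, 1), (2, 2, 1), (4, 1, 1), ...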
[ 2243.221295] env[62684]: DEBUG nova.compute.manager [req-1fa5de00-3a9f-4e28-b6ec-94156b5f0597 req-622ab73c-7167-438d-8497-7b39eaf3909d service nova] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Received event network-changed-eea0a099-12ed-4dc1-a3b4-01695e9e14e9 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2243.221457] env[62684]: DEBUG nova.compute.manager [req-1fa5de00-3a9f-4e28-b6ec-94156b5f0597 req-622ab73c-7167-438d-8497-7b39eaf3909d service nova] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Refreshing instance network info cache due to event network-changed-eea0a099-12ed-4dc1-a3b4-01695e9e14e9. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2243.221646] env[62684]: DEBUG oslo_concurrency.lockutils [req-1fa5de00-3a9f-4e28-b6ec-94156b5f0597 req-622ab73c-7167-438d-8497-7b39eaf3909d service nova] Acquiring lock "refresh_cache-0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2243.221930] env[62684]: DEBUG oslo_concurrency.lockutils [req-1fa5de00-3a9f-4e28-b6ec-94156b5f0597 req-622ab73c-7167-438d-8497-7b39eaf3909d service nova] Acquired lock "refresh_cache-0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2243.222068] env[62684]: DEBUG nova.network.neutron [req-1fa5de00-3a9f-4e28-b6ec-94156b5f0597 req-622ab73c-7167-438d-8497-7b39eaf3909d service nova] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Refreshing network info cache for port eea0a099-12ed-4dc1-a3b4-01695e9e14e9 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2243.406915] env[62684]: DEBUG nova.network.neutron [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Successfully created port: 22389f73-0c07-4171-893b-941798deca07 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2243.472501] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053643, 'name': PowerOffVM_Task, 'duration_secs': 0.190296} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2243.472802] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2243.473093] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2243.473796] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31445681-de6c-4133-8130-f93a6fff5c6e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.482334] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2243.482507] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-95579d31-6cfa-4716-a2bc-8225335bd91a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.559249] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquiring lock "refresh_cache-0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2243.559997] env[62684]: DEBUG nova.compute.manager [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2243.627613] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2243.627864] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2243.628058] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Deleting the datastore file [datastore1] ba12fa9a-10e3-4624-98b5-4ff7365e1940 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2243.628349] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b8f7cb22-f52e-4994-8839-e69bdbb6f375 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.636520] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2243.636520] env[62684]: value = "task-2053645" [ 2243.636520] env[62684]: _type = "Task" [ 2243.636520] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2243.650987] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053645, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2243.758130] env[62684]: DEBUG nova.network.neutron [req-1fa5de00-3a9f-4e28-b6ec-94156b5f0597 req-622ab73c-7167-438d-8497-7b39eaf3909d service nova] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2243.765017] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa2acec2-de3d-4c53-afa8-1e668a47e320 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.771675] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75734bc7-58b8-4a1d-8e0e-d5e2650393d8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.805899] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52971dd4-c91b-4c48-9db8-f28a11e5e027 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.817992] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12576e62-5db0-4146-9225-b35db6297a82 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.829997] env[62684]: DEBUG nova.compute.provider_tree [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2243.845974] env[62684]: DEBUG nova.network.neutron [req-1fa5de00-3a9f-4e28-b6ec-94156b5f0597 req-622ab73c-7167-438d-8497-7b39eaf3909d service nova] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2244.146894] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053645, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132396} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2244.148280] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2244.148505] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2244.148697] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2244.332890] env[62684]: DEBUG nova.scheduler.client.report [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2244.351356] env[62684]: DEBUG oslo_concurrency.lockutils [req-1fa5de00-3a9f-4e28-b6ec-94156b5f0597 req-622ab73c-7167-438d-8497-7b39eaf3909d service nova] Releasing lock "refresh_cache-0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2244.351686] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquired lock "refresh_cache-0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2244.351970] env[62684]: DEBUG nova.network.neutron [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2244.571457] env[62684]: DEBUG nova.compute.manager [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2244.598408] env[62684]: DEBUG nova.virt.hardware [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2244.598672] env[62684]: DEBUG nova.virt.hardware [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2244.598835] env[62684]: DEBUG nova.virt.hardware [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2244.599037] env[62684]: DEBUG nova.virt.hardware [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2244.599195] env[62684]: DEBUG nova.virt.hardware [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2244.599349] env[62684]: DEBUG nova.virt.hardware [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2244.599604] env[62684]: DEBUG nova.virt.hardware [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2244.599839] env[62684]: DEBUG nova.virt.hardware [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2244.600092] 
env[62684]: DEBUG nova.virt.hardware [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2244.600316] env[62684]: DEBUG nova.virt.hardware [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2244.600551] env[62684]: DEBUG nova.virt.hardware [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2244.601473] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b535f780-e716-47cb-bf30-cc96b3119064 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.610160] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db7674f3-4a0d-4526-bba3-c1c417eab959 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.838087] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.289s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2244.838655] env[62684]: DEBUG nova.compute.manager [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2244.841233] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.525s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2244.841462] env[62684]: DEBUG nova.objects.instance [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lazy-loading 'resources' on Instance uuid ca3d1a73-6f3b-4278-8fe7-03b66f407ba6 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2244.905433] env[62684]: DEBUG nova.network.neutron [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2245.144261] env[62684]: DEBUG nova.network.neutron [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Successfully updated port: 22389f73-0c07-4171-893b-941798deca07 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2245.170816] env[62684]: DEBUG nova.network.neutron [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Updating instance_info_cache with network_info: [{"id": "eea0a099-12ed-4dc1-a3b4-01695e9e14e9", "address": "fa:16:3e:80:e9:0d", "network": {"id": "4142ba34-c2e0-4a22-a8dd-be06ba98c6e5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1627792019-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0e0f0e1dc834134913bd742fa99b52f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeea0a099-12", "ovs_interfaceid": "eea0a099-12ed-4dc1-a3b4-01695e9e14e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2245.183551] env[62684]: DEBUG nova.virt.hardware [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2245.183818] env[62684]: DEBUG nova.virt.hardware [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2245.183985] env[62684]: DEBUG nova.virt.hardware [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 
tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2245.184200] env[62684]: DEBUG nova.virt.hardware [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2245.184408] env[62684]: DEBUG nova.virt.hardware [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2245.184498] env[62684]: DEBUG nova.virt.hardware [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2245.184922] env[62684]: DEBUG nova.virt.hardware [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2245.184922] env[62684]: DEBUG nova.virt.hardware [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2245.185053] env[62684]: DEBUG nova.virt.hardware [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2245.185289] env[62684]: DEBUG nova.virt.hardware [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2245.185408] env[62684]: DEBUG nova.virt.hardware [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2245.186297] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7757405e-cde9-438a-b9fb-eabe0ccd6830 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.196118] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05709225-f53c-4e7a-b6ad-ac23099bc669 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.211251] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None 
req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1c:c6:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92e4d027-e755-417b-8eea-9a8f24b85140', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0d17b2ea-8e17-456b-87e2-1e2bec93f187', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2245.218855] env[62684]: DEBUG oslo.service.loopingcall [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2245.218996] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2245.219427] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c97a61b-e59f-4f33-9bfd-754ea79d3543 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.241746] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2245.241746] env[62684]: value = "task-2053646" [ 2245.241746] env[62684]: _type = "Task" [ 2245.241746] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2245.250251] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053646, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2245.252966] env[62684]: DEBUG nova.compute.manager [req-67079a95-53cc-4196-94e3-799eed2fc2d6 req-70c92e0b-d1cf-4178-af90-4ddf98e1969a service nova] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Received event network-vif-plugged-22389f73-0c07-4171-893b-941798deca07 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2245.252966] env[62684]: DEBUG oslo_concurrency.lockutils [req-67079a95-53cc-4196-94e3-799eed2fc2d6 req-70c92e0b-d1cf-4178-af90-4ddf98e1969a service nova] Acquiring lock "68ed9549-14ab-4f90-bd78-925f289dc029-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2245.252966] env[62684]: DEBUG oslo_concurrency.lockutils [req-67079a95-53cc-4196-94e3-799eed2fc2d6 req-70c92e0b-d1cf-4178-af90-4ddf98e1969a service nova] Lock "68ed9549-14ab-4f90-bd78-925f289dc029-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2245.252966] env[62684]: DEBUG oslo_concurrency.lockutils [req-67079a95-53cc-4196-94e3-799eed2fc2d6 req-70c92e0b-d1cf-4178-af90-4ddf98e1969a service nova] Lock "68ed9549-14ab-4f90-bd78-925f289dc029-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2245.253223] env[62684]: DEBUG nova.compute.manager [req-67079a95-53cc-4196-94e3-799eed2fc2d6 req-70c92e0b-d1cf-4178-af90-4ddf98e1969a service nova] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] No waiting events found dispatching network-vif-plugged-22389f73-0c07-4171-893b-941798deca07 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2245.253223] env[62684]: WARNING nova.compute.manager [req-67079a95-53cc-4196-94e3-799eed2fc2d6 req-70c92e0b-d1cf-4178-af90-4ddf98e1969a service nova] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Received unexpected event network-vif-plugged-22389f73-0c07-4171-893b-941798deca07 for instance with vm_state building and task_state spawning. [ 2245.253375] env[62684]: DEBUG nova.compute.manager [req-67079a95-53cc-4196-94e3-799eed2fc2d6 req-70c92e0b-d1cf-4178-af90-4ddf98e1969a service nova] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Received event network-changed-22389f73-0c07-4171-893b-941798deca07 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2245.253532] env[62684]: DEBUG nova.compute.manager [req-67079a95-53cc-4196-94e3-799eed2fc2d6 req-70c92e0b-d1cf-4178-af90-4ddf98e1969a service nova] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Refreshing instance network info cache due to event network-changed-22389f73-0c07-4171-893b-941798deca07. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2245.253936] env[62684]: DEBUG oslo_concurrency.lockutils [req-67079a95-53cc-4196-94e3-799eed2fc2d6 req-70c92e0b-d1cf-4178-af90-4ddf98e1969a service nova] Acquiring lock "refresh_cache-68ed9549-14ab-4f90-bd78-925f289dc029" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2245.253936] env[62684]: DEBUG oslo_concurrency.lockutils [req-67079a95-53cc-4196-94e3-799eed2fc2d6 req-70c92e0b-d1cf-4178-af90-4ddf98e1969a service nova] Acquired lock "refresh_cache-68ed9549-14ab-4f90-bd78-925f289dc029" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2245.254090] env[62684]: DEBUG nova.network.neutron [req-67079a95-53cc-4196-94e3-799eed2fc2d6 req-70c92e0b-d1cf-4178-af90-4ddf98e1969a service nova] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Refreshing network info cache for port 22389f73-0c07-4171-893b-941798deca07 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2245.345122] env[62684]: DEBUG nova.compute.utils [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2245.349944] env[62684]: DEBUG nova.compute.manager [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2245.349944] env[62684]: DEBUG nova.network.neutron [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2245.393921] env[62684]: DEBUG nova.policy [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '96b96927115d49f2a04342784717e58e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '607a0aa1049640d882d7dd490f5f98ea', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2245.515319] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e61d51-0f26-4142-b01b-863cddd889da {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.524047] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41173135-56f0-4f7a-a83c-d489ddac2685 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.558495] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5ef52f14-41be-437f-b8b2-46943f8c3288 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.567335] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0b8b547-9658-4173-98ce-16522b9c4a21 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.582504] env[62684]: DEBUG nova.compute.provider_tree [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2245.647556] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "refresh_cache-68ed9549-14ab-4f90-bd78-925f289dc029" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2245.673871] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Releasing lock "refresh_cache-0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2245.674267] env[62684]: DEBUG nova.compute.manager [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Instance network_info: |[{"id": "eea0a099-12ed-4dc1-a3b4-01695e9e14e9", "address": "fa:16:3e:80:e9:0d", "network": {"id": "4142ba34-c2e0-4a22-a8dd-be06ba98c6e5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1627792019-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0e0f0e1dc834134913bd742fa99b52f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeea0a099-12", "ovs_interfaceid": "eea0a099-12ed-4dc1-a3b4-01695e9e14e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2245.674855] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:e9:0d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'f2e45023-22b5-458b-826e-9b7eb69ba028', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eea0a099-12ed-4dc1-a3b4-01695e9e14e9', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2245.685793] env[62684]: DEBUG oslo.service.loopingcall [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2245.685793] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2245.685793] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-01a1abc2-d199-456f-a11c-ba5ebe1efb31 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.705598] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2245.705598] env[62684]: value = "task-2053647" [ 2245.705598] env[62684]: _type = "Task" [ 2245.705598] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2245.709464] env[62684]: DEBUG nova.network.neutron [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Successfully created port: e2e433b0-9c1a-4fe6-bc77-e1e5416b042a {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2245.716690] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053647, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2245.756021] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053646, 'name': CreateVM_Task, 'duration_secs': 0.44053} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2245.756021] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2245.756021] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2245.756021] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2245.756021] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2245.756021] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21ea5530-541a-493d-8115-e825b36855f7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.765886] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2245.765886] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d299b8-fe91-4313-feb9-fd1febf6ebc4" [ 2245.765886] env[62684]: _type = "Task" [ 2245.765886] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2245.781666] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d299b8-fe91-4313-feb9-fd1febf6ebc4, 'name': SearchDatastore_Task, 'duration_secs': 0.012563} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2245.782148] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2245.782487] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2245.782920] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2245.783182] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2245.783521] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2245.783889] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f98edc9-f07c-459f-bcf4-db00e22aff28 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.792679] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2245.792863] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2245.793594] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-180d5360-7142-4a28-8f0d-fffa481bb830 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.799227] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2245.799227] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ba616f-0bfd-5941-1a1d-c9637ed4976c" [ 2245.799227] env[62684]: _type = "Task" [ 2245.799227] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2245.800076] env[62684]: DEBUG nova.network.neutron [req-67079a95-53cc-4196-94e3-799eed2fc2d6 req-70c92e0b-d1cf-4178-af90-4ddf98e1969a service nova] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2245.813768] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ba616f-0bfd-5941-1a1d-c9637ed4976c, 'name': SearchDatastore_Task, 'duration_secs': 0.010907} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2245.814619] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1add5dda-705f-45ea-a30b-6d36e774e710 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.820146] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2245.820146] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f4c3bc-1db5-badd-feaf-28e0bf4c2df4" [ 2245.820146] env[62684]: _type = "Task" [ 2245.820146] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2245.831551] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f4c3bc-1db5-badd-feaf-28e0bf4c2df4, 'name': SearchDatastore_Task, 'duration_secs': 0.008816} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2245.831801] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2245.832065] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] ba12fa9a-10e3-4624-98b5-4ff7365e1940/ba12fa9a-10e3-4624-98b5-4ff7365e1940.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2245.832468] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5117d660-4a04-42b6-ba01-99b3906a1d9e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.838904] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2245.838904] env[62684]: value = "task-2053648" [ 2245.838904] env[62684]: _type = "Task" [ 2245.838904] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2245.847325] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053648, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2245.849964] env[62684]: DEBUG nova.compute.manager [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2245.930561] env[62684]: DEBUG nova.network.neutron [req-67079a95-53cc-4196-94e3-799eed2fc2d6 req-70c92e0b-d1cf-4178-af90-4ddf98e1969a service nova] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2246.086029] env[62684]: DEBUG nova.scheduler.client.report [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2246.219721] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053647, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2246.350115] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053648, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.470612} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2246.350402] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] ba12fa9a-10e3-4624-98b5-4ff7365e1940/ba12fa9a-10e3-4624-98b5-4ff7365e1940.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2246.350637] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2246.350922] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b7f3ad19-612e-413c-b343-02c74999aed0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.359230] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2246.359230] env[62684]: value = "task-2053649" [ 2246.359230] env[62684]: _type = "Task" [ 2246.359230] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2246.372420] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053649, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2246.433897] env[62684]: DEBUG oslo_concurrency.lockutils [req-67079a95-53cc-4196-94e3-799eed2fc2d6 req-70c92e0b-d1cf-4178-af90-4ddf98e1969a service nova] Releasing lock "refresh_cache-68ed9549-14ab-4f90-bd78-925f289dc029" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2246.434488] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired lock "refresh_cache-68ed9549-14ab-4f90-bd78-925f289dc029" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2246.434654] env[62684]: DEBUG nova.network.neutron [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2246.594025] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.750s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2246.594679] env[62684]: DEBUG oslo_concurrency.lockutils [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.102s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2246.595367] env[62684]: DEBUG nova.objects.instance [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lazy-loading 'resources' on Instance uuid 584845d2-d146-42bf-8ef5-58532fe24f65 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2246.614489] env[62684]: INFO nova.scheduler.client.report [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Deleted allocations for instance ca3d1a73-6f3b-4278-8fe7-03b66f407ba6 [ 2246.721636] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053647, 'name': CreateVM_Task, 'duration_secs': 0.627035} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2246.721636] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2246.722470] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2246.722716] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2246.723277] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2246.723620] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee1f95c3-85f9-4002-b275-bba7a49b0327 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.733141] env[62684]: DEBUG oslo_vmware.api [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2246.733141] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5215cba6-863b-a9fb-c1be-ee059d8235c4" [ 2246.733141] env[62684]: _type = "Task" [ 2246.733141] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2246.747826] env[62684]: DEBUG oslo_vmware.api [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5215cba6-863b-a9fb-c1be-ee059d8235c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2246.862559] env[62684]: DEBUG nova.compute.manager [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2246.876523] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053649, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067477} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2246.877411] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2246.878397] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a516fd60-887d-4fc3-bdd3-b6fc8a61c72e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.902715] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] ba12fa9a-10e3-4624-98b5-4ff7365e1940/ba12fa9a-10e3-4624-98b5-4ff7365e1940.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2246.904631] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-594d7c7b-3421-4bae-8db6-8434f01b4189 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.927045] env[62684]: DEBUG nova.virt.hardware [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2246.927326] env[62684]: DEBUG nova.virt.hardware [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2246.930825] env[62684]: DEBUG nova.virt.hardware [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2246.930825] env[62684]: DEBUG nova.virt.hardware [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2246.930825] env[62684]: DEBUG 
nova.virt.hardware [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2246.930825] env[62684]: DEBUG nova.virt.hardware [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2246.930825] env[62684]: DEBUG nova.virt.hardware [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2246.930825] env[62684]: DEBUG nova.virt.hardware [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2246.930825] env[62684]: DEBUG nova.virt.hardware [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2246.930825] env[62684]: DEBUG nova.virt.hardware [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2246.930825] env[62684]: DEBUG nova.virt.hardware [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2246.930825] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b502b8dd-ebd1-4022-8c48-6eb9622ad3b5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.936803] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2246.936803] env[62684]: value = "task-2053650" [ 2246.936803] env[62684]: _type = "Task" [ 2246.936803] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2246.946469] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4435776-5d4b-4eba-98ac-98b2c77ee35d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.953380] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053650, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2246.971015] env[62684]: DEBUG nova.network.neutron [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2247.122731] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ccdf330b-a7f4-43d8-addd-f4a6e61ea7aa tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "ca3d1a73-6f3b-4278-8fe7-03b66f407ba6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.586s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2247.136527] env[62684]: DEBUG nova.network.neutron [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Updating instance_info_cache with network_info: [{"id": "22389f73-0c07-4171-893b-941798deca07", "address": "fa:16:3e:b1:05:0d", "network": {"id": "7678b347-6a54-4b84-9a4d-b566bbeb1ea4", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-51664912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d88ac878d44480b3b54b24ab87efa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22389f73-0c", "ovs_interfaceid": "22389f73-0c07-4171-893b-941798deca07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2247.246645] env[62684]: DEBUG oslo_vmware.api [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5215cba6-863b-a9fb-c1be-ee059d8235c4, 'name': SearchDatastore_Task, 'duration_secs': 0.098211} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2247.246645] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2247.246645] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2247.246961] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2247.247167] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2247.247395] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2247.248078] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fba570d7-b62a-4535-864d-2a8f57d28669 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.259465] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2247.259700] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2247.260705] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b50381d-ec0a-48c3-a63e-b28ed0de0693 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.266229] env[62684]: DEBUG oslo_vmware.api [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2247.266229] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52dfe8d8-34fd-6902-f32d-97c19b49b453" [ 2247.266229] env[62684]: _type = "Task" [ 2247.266229] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2247.277281] env[62684]: DEBUG oslo_vmware.api [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52dfe8d8-34fd-6902-f32d-97c19b49b453, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2247.284306] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdbeddab-1011-467f-acf0-400c5baf60bb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.292038] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e03ca00e-9ab4-48d5-b635-e45279c47e4b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.328772] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ecbffff-804b-4721-9045-bfff0c3cd53c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.336746] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5de8e62-d129-4d55-a916-4dc38531f4db {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.355295] env[62684]: DEBUG nova.compute.provider_tree [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2247.452421] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053650, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2247.605759] env[62684]: DEBUG nova.compute.manager [req-12a7425c-a5df-492a-9372-1396b320abfb req-252b9fbf-1ecd-4618-a256-a8dfd2e93744 service nova] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Received event network-vif-plugged-e2e433b0-9c1a-4fe6-bc77-e1e5416b042a {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2247.605759] env[62684]: DEBUG oslo_concurrency.lockutils [req-12a7425c-a5df-492a-9372-1396b320abfb req-252b9fbf-1ecd-4618-a256-a8dfd2e93744 service nova] Acquiring lock "c87b2875-ae05-4091-93fe-7b33d4ca864b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2247.605759] env[62684]: DEBUG oslo_concurrency.lockutils [req-12a7425c-a5df-492a-9372-1396b320abfb req-252b9fbf-1ecd-4618-a256-a8dfd2e93744 service nova] Lock "c87b2875-ae05-4091-93fe-7b33d4ca864b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2247.605759] env[62684]: DEBUG oslo_concurrency.lockutils [req-12a7425c-a5df-492a-9372-1396b320abfb req-252b9fbf-1ecd-4618-a256-a8dfd2e93744 service nova] Lock "c87b2875-ae05-4091-93fe-7b33d4ca864b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2247.605759] env[62684]: DEBUG nova.compute.manager [req-12a7425c-a5df-492a-9372-1396b320abfb req-252b9fbf-1ecd-4618-a256-a8dfd2e93744 service nova] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] No waiting events found dispatching network-vif-plugged-e2e433b0-9c1a-4fe6-bc77-e1e5416b042a {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2247.607572] env[62684]: WARNING nova.compute.manager [req-12a7425c-a5df-492a-9372-1396b320abfb req-252b9fbf-1ecd-4618-a256-a8dfd2e93744 service nova] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Received unexpected event network-vif-plugged-e2e433b0-9c1a-4fe6-bc77-e1e5416b042a for instance with vm_state building and task_state spawning. 
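The records above repeatedly show the oslo.vmware task-polling loop: a vCenter task (CreateVM_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, SearchDatastore_Task) is submitted, then wait_for_task (api.py:397) blocks while _poll_task (api.py:434) reports progress until the task "completed successfully". The snippet below is only a minimal, hypothetical sketch of that calling pattern using oslo_vmware.api.VMwareAPISession; the host, credentials, disk path and sizes are placeholders and are not taken from this log, and a datacenter reference that Nova normally passes is omitted for brevity.

```python
# Minimal sketch (not Nova's actual code path) of the submit-then-poll
# pattern visible in the log: invoke a vSphere task via the Vim proxy,
# then block in wait_for_task, which polls task progress the way the
# "_poll_task ... progress is N%" records above show.
from oslo_vmware import api

# Placeholder connection details -- illustrative only.
session = api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

vim = session.vim
disk_mgr = vim.service_content.virtualDiskManager

# Kick off an ExtendVirtualDisk_Task; the path and size are placeholders
# (a datacenter reference is normally supplied as well).
task = session.invoke_api(
    vim, 'ExtendVirtualDisk_Task', disk_mgr,
    name='[datastore1] example/example.vmdk',
    newCapacityKb=1048576,
    eagerZero=False)

# wait_for_task polls the task and raises on error, mirroring the
# "Task: {...} completed successfully" records in this log.
session.wait_for_task(task)
```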
[ 2247.638493] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Releasing lock "refresh_cache-68ed9549-14ab-4f90-bd78-925f289dc029" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2247.642325] env[62684]: DEBUG nova.compute.manager [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Instance network_info: |[{"id": "22389f73-0c07-4171-893b-941798deca07", "address": "fa:16:3e:b1:05:0d", "network": {"id": "7678b347-6a54-4b84-9a4d-b566bbeb1ea4", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-51664912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d88ac878d44480b3b54b24ab87efa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22389f73-0c", "ovs_interfaceid": "22389f73-0c07-4171-893b-941798deca07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2247.642325] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:05:0d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'de5fcb06-b0d0-467f-86fe-06882165ac31', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '22389f73-0c07-4171-893b-941798deca07', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2247.651625] env[62684]: DEBUG oslo.service.loopingcall [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2247.652102] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2247.652631] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ead684e1-b623-446a-a9aa-fd7ce0841e40 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.677943] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2247.677943] env[62684]: value = "task-2053651" [ 2247.677943] env[62684]: _type = "Task" [ 2247.677943] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2247.697182] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053651, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2247.780617] env[62684]: DEBUG oslo_vmware.api [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52dfe8d8-34fd-6902-f32d-97c19b49b453, 'name': SearchDatastore_Task, 'duration_secs': 0.04508} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2247.781866] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4996e216-7e40-43f5-bb7a-1d455dada0ec {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.791574] env[62684]: DEBUG oslo_vmware.api [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2247.791574] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]525bd20e-f135-8dc1-d5ac-68c76a83b1ec" [ 2247.791574] env[62684]: _type = "Task" [ 2247.791574] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2247.802848] env[62684]: DEBUG oslo_vmware.api [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]525bd20e-f135-8dc1-d5ac-68c76a83b1ec, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2247.831875] env[62684]: DEBUG nova.network.neutron [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Successfully updated port: e2e433b0-9c1a-4fe6-bc77-e1e5416b042a {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2247.859752] env[62684]: DEBUG nova.scheduler.client.report [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2247.962970] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053650, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2248.189731] env[62684]: DEBUG oslo_concurrency.lockutils [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "b1f70e39-bf37-4fb8-b95b-653b59bec265" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2248.190134] env[62684]: DEBUG oslo_concurrency.lockutils [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "b1f70e39-bf37-4fb8-b95b-653b59bec265" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2248.190374] env[62684]: DEBUG oslo_concurrency.lockutils [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "b1f70e39-bf37-4fb8-b95b-653b59bec265-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2248.190600] env[62684]: DEBUG oslo_concurrency.lockutils [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "b1f70e39-bf37-4fb8-b95b-653b59bec265-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2248.190875] env[62684]: DEBUG oslo_concurrency.lockutils [None 
req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "b1f70e39-bf37-4fb8-b95b-653b59bec265-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2248.196520] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053651, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2248.197107] env[62684]: INFO nova.compute.manager [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Terminating instance [ 2248.203265] env[62684]: DEBUG nova.compute.manager [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2248.203265] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2248.203265] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97aa3428-c52f-4533-acc2-62d39274035c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.222045] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2248.222045] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3d3da3a5-7b16-46f3-a79c-75a800ad05a8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.231941] env[62684]: DEBUG oslo_vmware.api [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 2248.231941] env[62684]: value = "task-2053652" [ 2248.231941] env[62684]: _type = "Task" [ 2248.231941] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2248.251193] env[62684]: DEBUG oslo_vmware.api [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053652, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2248.309349] env[62684]: DEBUG oslo_vmware.api [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]525bd20e-f135-8dc1-d5ac-68c76a83b1ec, 'name': SearchDatastore_Task, 'duration_secs': 0.01267} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2248.309663] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2248.309941] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d/0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2248.310234] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5cada598-fd30-402d-8b5d-29b094605348 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.320099] env[62684]: DEBUG oslo_vmware.api [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2248.320099] env[62684]: value = "task-2053653" [ 2248.320099] env[62684]: _type = "Task" [ 2248.320099] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2248.330920] env[62684]: DEBUG oslo_vmware.api [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053653, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2248.334788] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "refresh_cache-c87b2875-ae05-4091-93fe-7b33d4ca864b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2248.334905] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquired lock "refresh_cache-c87b2875-ae05-4091-93fe-7b33d4ca864b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2248.335045] env[62684]: DEBUG nova.network.neutron [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2248.367826] env[62684]: DEBUG oslo_concurrency.lockutils [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.774s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2248.370690] env[62684]: DEBUG oslo_concurrency.lockutils [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.698s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2248.370833] env[62684]: DEBUG oslo_concurrency.lockutils [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2248.398382] env[62684]: INFO nova.scheduler.client.report [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Deleted allocations for instance 584845d2-d146-42bf-8ef5-58532fe24f65 [ 2248.400670] env[62684]: INFO nova.scheduler.client.report [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Deleted allocations for instance 264c6900-dbef-455e-95cc-1df73c735cc8 [ 2248.455777] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053650, 'name': ReconfigVM_Task, 'duration_secs': 1.340715} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2248.456172] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Reconfigured VM instance instance-00000062 to attach disk [datastore1] ba12fa9a-10e3-4624-98b5-4ff7365e1940/ba12fa9a-10e3-4624-98b5-4ff7365e1940.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2248.456860] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-56135c46-b3b2-4a7f-934f-362d8ec3663a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.468873] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2248.468873] env[62684]: value = "task-2053654" [ 2248.468873] env[62684]: _type = "Task" [ 2248.468873] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2248.479705] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053654, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2248.693662] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053651, 'name': CreateVM_Task, 'duration_secs': 0.928889} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2248.693997] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2248.694557] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2248.694731] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2248.695091] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2248.695384] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9c1128f-b0fc-4571-97ff-35bd4093d678 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.703044] env[62684]: DEBUG oslo_vmware.api [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2248.703044] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b53956-57ed-3653-73ea-4d9c0bad9687" [ 2248.703044] env[62684]: _type = "Task" [ 2248.703044] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2248.715732] env[62684]: DEBUG oslo_vmware.api [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b53956-57ed-3653-73ea-4d9c0bad9687, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2248.747530] env[62684]: DEBUG oslo_vmware.api [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053652, 'name': PowerOffVM_Task, 'duration_secs': 0.220751} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2248.748016] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2248.748315] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2248.748840] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-280d2062-d64f-405e-b950-1b105ff86a0d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.833128] env[62684]: DEBUG oslo_vmware.api [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053653, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2248.913963] env[62684]: DEBUG oslo_concurrency.lockutils [None req-528f94d3-962b-4108-8a04-3eb034fc247e tempest-AttachVolumeTestJSON-181164161 tempest-AttachVolumeTestJSON-181164161-project-member] Lock "584845d2-d146-42bf-8ef5-58532fe24f65" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.649s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2248.920645] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2248.921066] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2248.921247] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Deleting the datastore file [datastore1] b1f70e39-bf37-4fb8-b95b-653b59bec265 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2248.922041] env[62684]: DEBUG nova.network.neutron [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2248.926575] env[62684]: DEBUG oslo_concurrency.lockutils [None req-02356f11-6b01-4092-a365-32307288b882 tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "264c6900-dbef-455e-95cc-1df73c735cc8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.653s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2248.927593] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b1b83356-8d58-421c-bbb2-1101ef4614d0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.939607] env[62684]: DEBUG oslo_vmware.api [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for the task: (returnval){ [ 2248.939607] env[62684]: value = "task-2053656" [ 2248.939607] env[62684]: _type = "Task" [ 2248.939607] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2248.950599] env[62684]: DEBUG oslo_vmware.api [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053656, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2248.979089] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053654, 'name': Rename_Task, 'duration_secs': 0.180406} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2248.979377] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2248.979676] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de490d33-8419-4f8e-bd3c-6852dacce1e9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.987736] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2248.987736] env[62684]: value = "task-2053657" [ 2248.987736] env[62684]: _type = "Task" [ 2248.987736] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2248.996631] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053657, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2249.182900] env[62684]: DEBUG nova.network.neutron [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Updating instance_info_cache with network_info: [{"id": "e2e433b0-9c1a-4fe6-bc77-e1e5416b042a", "address": "fa:16:3e:80:1f:21", "network": {"id": "b24dd0c0-a394-4ca6-a79a-94535bc1df6f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2023102141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "607a0aa1049640d882d7dd490f5f98ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2e433b0-9c", "ovs_interfaceid": "e2e433b0-9c1a-4fe6-bc77-e1e5416b042a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2249.219601] env[62684]: DEBUG oslo_vmware.api [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b53956-57ed-3653-73ea-4d9c0bad9687, 'name': SearchDatastore_Task, 'duration_secs': 0.023756} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2249.220670] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2249.221196] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2249.222614] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2249.222820] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2249.223163] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2249.224207] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c24019c-a8af-4966-92e9-ea478a6d3b78 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.236384] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2249.236594] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2249.237688] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8ec4291-d772-476a-9ca1-7c8c5644ad4c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.244477] env[62684]: DEBUG oslo_vmware.api [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2249.244477] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5248dcc8-47c9-4d05-f870-f87c2bc66e01" [ 2249.244477] env[62684]: _type = "Task" [ 2249.244477] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2249.252844] env[62684]: DEBUG oslo_vmware.api [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5248dcc8-47c9-4d05-f870-f87c2bc66e01, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2249.331348] env[62684]: DEBUG oslo_vmware.api [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053653, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.583761} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2249.331982] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d/0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2249.332238] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2249.332506] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-47e134ed-96c3-4a09-85ae-9e5690724750 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.342426] env[62684]: DEBUG oslo_vmware.api [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2249.342426] env[62684]: value = "task-2053658" [ 2249.342426] env[62684]: _type = "Task" [ 2249.342426] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2249.354338] env[62684]: DEBUG oslo_vmware.api [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053658, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2249.376752] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "5b3668f3-219d-4304-bc9e-9b911762085d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2249.376752] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "5b3668f3-219d-4304-bc9e-9b911762085d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2249.451509] env[62684]: DEBUG oslo_vmware.api [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Task: {'id': task-2053656, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154595} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2249.451742] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2249.451960] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2249.452171] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2249.452353] env[62684]: INFO nova.compute.manager [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Took 1.25 seconds to destroy the instance on the hypervisor. 
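[annotation] The run of entries above traces the spawn disk path for instance 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d: the cached image vmdk is copied into the instance directory (CopyVirtualDisk_Task), the root disk is then extended to the flavor size (ExtendVirtualDisk_Task, logged in KB), and each vCenter task is polled until it completes. A minimal sketch of that copy -> extend -> wait pattern using the public oslo.vmware session API follows; the vCenter host, credentials, datastore paths and the omitted datacenter arguments are illustrative assumptions, not values from this deployment or the actual Nova code that produced these lines.

    # Hedged sketch only: copy a cached image vmdk, grow it, wait on each task.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',          # placeholders
        api_retry_count=10, task_poll_interval=0.5)

    disk_mgr = session.vim.service_content.virtualDiskManager
    cached = '[datastore2] devstack-image-cache_base/IMAGE_ID/IMAGE_ID.vmdk'
    root = '[datastore2] INSTANCE_UUID/INSTANCE_UUID.vmdk'

    # Copy the cached image vmdk to the instance directory ...
    copy_task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName=cached, destName=root)
    session.wait_for_task(copy_task)       # blocks, polling task progress

    # ... then extend the root disk to the flavor size
    # (the log records the target in KB: 1048576 KB == 1 GiB).
    extend_task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
        name=root, newCapacityKb=1048576, eagerZero=False)
    session.wait_for_task(extend_task)

wait_for_task() is what produces the repeated "Task: {...} progress is N%" poll lines seen throughout this log.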
[ 2249.452596] env[62684]: DEBUG oslo.service.loopingcall [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2249.452794] env[62684]: DEBUG nova.compute.manager [-] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2249.452888] env[62684]: DEBUG nova.network.neutron [-] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2249.500408] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053657, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2249.639630] env[62684]: DEBUG nova.compute.manager [req-22671d40-9a01-4f0d-a432-accb484f7273 req-76875ab0-2447-4c05-914e-17c268f1b2a7 service nova] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Received event network-changed-e2e433b0-9c1a-4fe6-bc77-e1e5416b042a {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2249.639630] env[62684]: DEBUG nova.compute.manager [req-22671d40-9a01-4f0d-a432-accb484f7273 req-76875ab0-2447-4c05-914e-17c268f1b2a7 service nova] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Refreshing instance network info cache due to event network-changed-e2e433b0-9c1a-4fe6-bc77-e1e5416b042a. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2249.639630] env[62684]: DEBUG oslo_concurrency.lockutils [req-22671d40-9a01-4f0d-a432-accb484f7273 req-76875ab0-2447-4c05-914e-17c268f1b2a7 service nova] Acquiring lock "refresh_cache-c87b2875-ae05-4091-93fe-7b33d4ca864b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2249.687412] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Releasing lock "refresh_cache-c87b2875-ae05-4091-93fe-7b33d4ca864b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2249.687412] env[62684]: DEBUG nova.compute.manager [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Instance network_info: |[{"id": "e2e433b0-9c1a-4fe6-bc77-e1e5416b042a", "address": "fa:16:3e:80:1f:21", "network": {"id": "b24dd0c0-a394-4ca6-a79a-94535bc1df6f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2023102141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "607a0aa1049640d882d7dd490f5f98ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2e433b0-9c", "ovs_interfaceid": "e2e433b0-9c1a-4fe6-bc77-e1e5416b042a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2249.687412] env[62684]: DEBUG oslo_concurrency.lockutils [req-22671d40-9a01-4f0d-a432-accb484f7273 req-76875ab0-2447-4c05-914e-17c268f1b2a7 service nova] Acquired lock "refresh_cache-c87b2875-ae05-4091-93fe-7b33d4ca864b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2249.687710] env[62684]: DEBUG nova.network.neutron [req-22671d40-9a01-4f0d-a432-accb484f7273 req-76875ab0-2447-4c05-914e-17c268f1b2a7 service nova] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Refreshing network info cache for port e2e433b0-9c1a-4fe6-bc77-e1e5416b042a {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2249.688854] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:1f:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'e2e433b0-9c1a-4fe6-bc77-e1e5416b042a', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2249.698562] env[62684]: DEBUG oslo.service.loopingcall [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2249.699605] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2249.699605] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7cf18acf-4153-4494-83a9-9d443ccd16b5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.725808] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2249.725808] env[62684]: value = "task-2053660" [ 2249.725808] env[62684]: _type = "Task" [ 2249.725808] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2249.735052] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053660, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2249.757245] env[62684]: DEBUG oslo_vmware.api [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5248dcc8-47c9-4d05-f870-f87c2bc66e01, 'name': SearchDatastore_Task, 'duration_secs': 0.011011} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2249.758168] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c754ae72-d19b-4c36-9a9a-d595aae5c165 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.765375] env[62684]: DEBUG oslo_vmware.api [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2249.765375] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52302c1c-d76f-1b90-cd90-1b91770eebfb" [ 2249.765375] env[62684]: _type = "Task" [ 2249.765375] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2249.776575] env[62684]: DEBUG oslo_vmware.api [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52302c1c-d76f-1b90-cd90-1b91770eebfb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2249.855563] env[62684]: DEBUG oslo_vmware.api [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053658, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074145} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2249.855899] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2249.856779] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d7c5b3-579f-4791-b4fa-4a574e1bc377 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.880123] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d/0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2249.880563] env[62684]: DEBUG nova.compute.manager [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2249.883220] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f37dd9f1-831f-467e-9c5e-e48320d40835 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.904278] env[62684]: DEBUG oslo_vmware.api [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2249.904278] env[62684]: value = "task-2053661" [ 2249.904278] env[62684]: _type = "Task" [ 2249.904278] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2249.913971] env[62684]: DEBUG oslo_vmware.api [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053661, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2250.000210] env[62684]: DEBUG oslo_vmware.api [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053657, 'name': PowerOnVM_Task, 'duration_secs': 0.715035} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2250.000541] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2250.000800] env[62684]: DEBUG nova.compute.manager [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2250.001797] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54178b08-2012-4ef5-9df2-89dad7dce062 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.236340] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053660, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2250.276744] env[62684]: DEBUG oslo_vmware.api [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52302c1c-d76f-1b90-cd90-1b91770eebfb, 'name': SearchDatastore_Task, 'duration_secs': 0.012362} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2250.276744] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2250.276744] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 68ed9549-14ab-4f90-bd78-925f289dc029/68ed9549-14ab-4f90-bd78-925f289dc029.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2250.276998] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-be327669-610e-4fd4-a033-8cc5210776ed {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.279111] env[62684]: DEBUG nova.network.neutron [-] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2250.286347] env[62684]: DEBUG oslo_vmware.api [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2250.286347] env[62684]: value = 
"task-2053662" [ 2250.286347] env[62684]: _type = "Task" [ 2250.286347] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2250.296537] env[62684]: DEBUG oslo_vmware.api [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053662, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2250.420808] env[62684]: DEBUG oslo_vmware.api [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053661, 'name': ReconfigVM_Task, 'duration_secs': 0.322469} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2250.421663] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Reconfigured VM instance instance-00000069 to attach disk [datastore2] 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d/0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2250.422111] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0ca860de-33cf-4ccd-9256-1c6cc420f39f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.426491] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2250.426766] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2250.428438] env[62684]: INFO nova.compute.claims [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2250.433284] env[62684]: DEBUG oslo_vmware.api [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2250.433284] env[62684]: value = "task-2053663" [ 2250.433284] env[62684]: _type = "Task" [ 2250.433284] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2250.443802] env[62684]: DEBUG oslo_vmware.api [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053663, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2250.508550] env[62684]: DEBUG nova.network.neutron [req-22671d40-9a01-4f0d-a432-accb484f7273 req-76875ab0-2447-4c05-914e-17c268f1b2a7 service nova] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Updated VIF entry in instance network info cache for port e2e433b0-9c1a-4fe6-bc77-e1e5416b042a. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2250.509438] env[62684]: DEBUG nova.network.neutron [req-22671d40-9a01-4f0d-a432-accb484f7273 req-76875ab0-2447-4c05-914e-17c268f1b2a7 service nova] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Updating instance_info_cache with network_info: [{"id": "e2e433b0-9c1a-4fe6-bc77-e1e5416b042a", "address": "fa:16:3e:80:1f:21", "network": {"id": "b24dd0c0-a394-4ca6-a79a-94535bc1df6f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2023102141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "607a0aa1049640d882d7dd490f5f98ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2e433b0-9c", "ovs_interfaceid": "e2e433b0-9c1a-4fe6-bc77-e1e5416b042a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2250.517641] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2250.738914] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053660, 'name': CreateVM_Task, 'duration_secs': 0.806483} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2250.739296] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2250.739767] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2250.739968] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2250.740311] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2250.740600] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6231b1cc-5e31-4505-9eb8-f4e21e09b595 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.746097] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2250.746097] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5229515d-2fe1-c564-536c-1a66cd825b78" [ 2250.746097] env[62684]: _type = "Task" [ 2250.746097] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2250.755052] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5229515d-2fe1-c564-536c-1a66cd825b78, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2250.782193] env[62684]: INFO nova.compute.manager [-] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Took 1.33 seconds to deallocate network for instance. [ 2250.797129] env[62684]: DEBUG oslo_vmware.api [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053662, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2250.947319] env[62684]: DEBUG oslo_vmware.api [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053663, 'name': Rename_Task, 'duration_secs': 0.158625} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2250.948383] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2250.948548] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6ca4b8ab-43d7-40db-8946-e06f23aa677c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.955158] env[62684]: DEBUG oslo_vmware.api [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2250.955158] env[62684]: value = "task-2053664" [ 2250.955158] env[62684]: _type = "Task" [ 2250.955158] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2250.963713] env[62684]: DEBUG oslo_vmware.api [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053664, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2251.011884] env[62684]: DEBUG oslo_concurrency.lockutils [req-22671d40-9a01-4f0d-a432-accb484f7273 req-76875ab0-2447-4c05-914e-17c268f1b2a7 service nova] Releasing lock "refresh_cache-c87b2875-ae05-4091-93fe-7b33d4ca864b" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2251.257798] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5229515d-2fe1-c564-536c-1a66cd825b78, 'name': SearchDatastore_Task, 'duration_secs': 0.020562} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2251.258120] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2251.258379] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2251.258625] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2251.258775] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2251.258957] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2251.259237] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1a1f3a81-b3a6-4c67-b83d-1a284d680280 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.273020] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2251.273229] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2251.273945] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-001f1d96-8e2e-4ff7-8b78-e1446b203e89 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.280644] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2251.280644] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b2944e-fd75-fe4a-5830-0cdd8cab8482" [ 2251.280644] env[62684]: _type = "Task" [ 2251.280644] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2251.289309] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b2944e-fd75-fe4a-5830-0cdd8cab8482, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2251.293012] env[62684]: DEBUG oslo_concurrency.lockutils [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2251.298488] env[62684]: DEBUG oslo_vmware.api [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053662, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.64344} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2251.298729] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 68ed9549-14ab-4f90-bd78-925f289dc029/68ed9549-14ab-4f90-bd78-925f289dc029.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2251.298940] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2251.299197] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-da4248bf-db21-44d3-980a-2134be4631ad {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.306787] env[62684]: DEBUG oslo_vmware.api [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2251.306787] env[62684]: value = "task-2053665" [ 2251.306787] env[62684]: _type = "Task" [ 2251.306787] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2251.315210] env[62684]: DEBUG oslo_vmware.api [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053665, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2251.468409] env[62684]: DEBUG oslo_vmware.api [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053664, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2251.579312] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c4216cc-234e-4f0f-81f5-c19aa87f3722 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.589524] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8abf4c01-a4c5-4cd8-8d8d-fe9a70f57d7d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.628545] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03c77e4d-eb7d-4353-9112-2fe61cacd948 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.638498] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebd6865d-1dbd-43fa-9876-5d5c2df822cc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.654164] env[62684]: DEBUG nova.compute.provider_tree [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2251.682302] env[62684]: DEBUG nova.compute.manager [req-e6e943bd-691e-4207-9f03-e5b1ba3c4a66 req-a1477872-51a2-48e6-a812-33917cb5f30c service nova] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Received event network-vif-deleted-35a0f9ef-b68c-43df-8887-6c35257bbc58 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2251.793215] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b2944e-fd75-fe4a-5830-0cdd8cab8482, 'name': SearchDatastore_Task, 'duration_secs': 0.010249} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2251.793978] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fb88fab-e0a5-423f-b914-377e064ab470 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.800105] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2251.800105] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b53fc7-2cd1-cfb5-5d07-c26db2175d57" [ 2251.800105] env[62684]: _type = "Task" [ 2251.800105] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2251.807751] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b53fc7-2cd1-cfb5-5d07-c26db2175d57, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2251.814714] env[62684]: DEBUG oslo_vmware.api [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053665, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081035} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2251.814941] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2251.815669] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff0fba3-3690-406b-acf0-209ce40e3769 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.836466] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] 68ed9549-14ab-4f90-bd78-925f289dc029/68ed9549-14ab-4f90-bd78-925f289dc029.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2251.836746] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d6f453e-caec-44c6-bcbb-43aa3e428ba6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.856648] env[62684]: DEBUG oslo_vmware.api [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2251.856648] env[62684]: value = "task-2053667" [ 2251.856648] env[62684]: _type = "Task" [ 2251.856648] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2251.864971] env[62684]: DEBUG oslo_vmware.api [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053667, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2251.965752] env[62684]: DEBUG oslo_vmware.api [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053664, 'name': PowerOnVM_Task, 'duration_secs': 0.68754} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2251.966054] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2251.966286] env[62684]: INFO nova.compute.manager [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Took 9.68 seconds to spawn the instance on the hypervisor. [ 2251.966474] env[62684]: DEBUG nova.compute.manager [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2251.967299] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49870728-5ac0-4352-9564-018d02d184f3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.156061] env[62684]: DEBUG nova.scheduler.client.report [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2252.312944] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b53fc7-2cd1-cfb5-5d07-c26db2175d57, 'name': SearchDatastore_Task, 'duration_secs': 0.056558} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2252.313213] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2252.313483] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] c87b2875-ae05-4091-93fe-7b33d4ca864b/c87b2875-ae05-4091-93fe-7b33d4ca864b.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2252.313757] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-de224030-a658-4537-86a2-419ed04adfc9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.321844] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2252.321844] env[62684]: value = "task-2053668" [ 2252.321844] env[62684]: _type = "Task" [ 2252.321844] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2252.330286] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053668, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2252.367363] env[62684]: DEBUG oslo_vmware.api [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053667, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2252.483889] env[62684]: INFO nova.compute.manager [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Took 18.90 seconds to build instance. 
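[annotation] For context on the "Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f" report a few entries above: placement's effective capacity per resource class is (total - reserved) * allocation_ratio, with min_unit/max_unit/step_size further constraining individual allocations. A quick check using the values copied from that log entry:

    # Capacity arithmetic for the inventory logged above (values from the log).
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g}")   # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400

So this node advertises 192 schedulable vCPUs (48 physical at a 4.0 ratio), ~196 GB of RAM after the 512 MB reservation, and 400 GB of disk.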
[ 2252.660791] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.234s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2252.661416] env[62684]: DEBUG nova.compute.manager [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2252.664150] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 2.147s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2252.664360] env[62684]: DEBUG nova.objects.instance [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62684) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2252.832880] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053668, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2252.866562] env[62684]: DEBUG oslo_vmware.api [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053667, 'name': ReconfigVM_Task, 'duration_secs': 0.537958} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2252.868057] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Reconfigured VM instance instance-0000006a to attach disk [datastore1] 68ed9549-14ab-4f90-bd78-925f289dc029/68ed9549-14ab-4f90-bd78-925f289dc029.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2252.868057] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2769040a-8449-4a51-9734-ca8d4ff12215 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.874775] env[62684]: DEBUG oslo_vmware.api [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2252.874775] env[62684]: value = "task-2053669" [ 2252.874775] env[62684]: _type = "Task" [ 2252.874775] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2252.883206] env[62684]: DEBUG oslo_vmware.api [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053669, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2252.917804] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquiring lock "0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2252.986468] env[62684]: DEBUG oslo_concurrency.lockutils [None req-fe25a3e9-87cc-446c-a92b-cef217540d59 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lock "0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.415s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2252.986962] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lock "0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.069s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2252.987489] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquiring lock "0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2252.987761] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lock "0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2252.988110] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lock "0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2252.991314] env[62684]: INFO nova.compute.manager [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Terminating instance [ 2252.994241] env[62684]: DEBUG nova.compute.manager [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2252.994607] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2252.995909] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7003e4d4-3f7d-40a4-9905-f0b0dc28c10a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.008983] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2253.009361] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-34e48b3d-7395-4937-887e-7b4848aff1ff {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.019863] env[62684]: DEBUG oslo_vmware.api [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2253.019863] env[62684]: value = "task-2053670" [ 2253.019863] env[62684]: _type = "Task" [ 2253.019863] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2253.034527] env[62684]: DEBUG oslo_vmware.api [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053670, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2253.168345] env[62684]: DEBUG nova.compute.utils [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2253.170303] env[62684]: DEBUG nova.compute.manager [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2253.170483] env[62684]: DEBUG nova.network.neutron [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2253.225353] env[62684]: DEBUG nova.policy [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '58ea1db87d2b44408282a8b82d799443', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '947e7359aaba456fa1763f4dc8e9d359', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2253.335304] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053668, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.916061} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2253.335787] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] c87b2875-ae05-4091-93fe-7b33d4ca864b/c87b2875-ae05-4091-93fe-7b33d4ca864b.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2253.336037] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2253.336338] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6f70017b-68fe-4789-9219-bc391d1a34e3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.345168] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2253.345168] env[62684]: value = "task-2053671" [ 2253.345168] env[62684]: _type = "Task" [ 2253.345168] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2253.355714] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053671, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2253.385457] env[62684]: DEBUG oslo_vmware.api [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053669, 'name': Rename_Task, 'duration_secs': 0.197795} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2253.385759] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2253.386052] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8c3cf6b0-0e4e-4955-afc3-1cb2b94ac915 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.393897] env[62684]: DEBUG oslo_vmware.api [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2253.393897] env[62684]: value = "task-2053672" [ 2253.393897] env[62684]: _type = "Task" [ 2253.393897] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2253.404371] env[62684]: DEBUG oslo_vmware.api [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053672, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2253.511988] env[62684]: DEBUG nova.network.neutron [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Successfully created port: adc3c1c4-6d99-419c-b176-d3f75d6a908c {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2253.531347] env[62684]: DEBUG oslo_vmware.api [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053670, 'name': PowerOffVM_Task, 'duration_secs': 0.330674} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2253.531347] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2253.531347] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2253.531347] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-55034087-ba3b-43df-9fac-50b536a796b2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.678332] env[62684]: DEBUG nova.compute.manager [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2253.689229] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7c495a4f-d1c0-4ddf-a82a-c49ac01fdab4 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.022s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2253.690739] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2253.691161] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2253.691472] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Deleting the datastore file [datastore2] 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2253.692429] env[62684]: DEBUG oslo_concurrency.lockutils [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.399s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2253.692706] env[62684]: DEBUG nova.objects.instance [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lazy-loading 'resources' on Instance uuid b1f70e39-bf37-4fb8-b95b-653b59bec265 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2253.693924] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0d0dfe19-a43f-49b0-af25-6fd2103e0200 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.704058] env[62684]: DEBUG oslo_vmware.api [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2253.704058] env[62684]: value = "task-2053674" [ 2253.704058] env[62684]: _type = "Task" [ 2253.704058] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2253.720025] env[62684]: DEBUG oslo_vmware.api [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053674, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2253.856111] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053671, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072381} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2253.856111] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2253.856550] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07c472ee-faf3-4ef0-bbaf-21e11c399ad4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.880025] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] c87b2875-ae05-4091-93fe-7b33d4ca864b/c87b2875-ae05-4091-93fe-7b33d4ca864b.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2253.880285] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf14c9e7-aa66-48ea-a824-a8437db13dc7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.904788] env[62684]: DEBUG oslo_vmware.api [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053672, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2253.906320] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2253.906320] env[62684]: value = "task-2053675" [ 2253.906320] env[62684]: _type = "Task" [ 2253.906320] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2253.914224] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053675, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2254.217811] env[62684]: DEBUG oslo_vmware.api [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053674, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.179101} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2254.217811] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2254.217811] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2254.217983] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2254.218209] env[62684]: INFO nova.compute.manager [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Took 1.22 seconds to destroy the instance on the hypervisor. [ 2254.219236] env[62684]: DEBUG oslo.service.loopingcall [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2254.219236] env[62684]: DEBUG nova.compute.manager [-] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2254.219236] env[62684]: DEBUG nova.network.neutron [-] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2254.359749] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5477daef-5837-424c-b46a-5b894d4d2419 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.367700] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b7bebe-c2a1-41f5-bdfc-336a6855973c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.406087] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5f979fa-7329-4e7c-91d9-88e0354bdb0d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.417375] env[62684]: DEBUG oslo_vmware.api [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053672, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2254.422661] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-202dfb58-4938-4ba3-a79b-46cb0ad4dc38 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.431782] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053675, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2254.445425] env[62684]: DEBUG nova.compute.provider_tree [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2254.540312] env[62684]: DEBUG nova.compute.manager [req-5d3b0343-f16f-4c17-b3b5-cee13726f8d5 req-00d0575f-82de-4422-8e23-215dc1fe1ae3 service nova] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Received event network-vif-deleted-eea0a099-12ed-4dc1-a3b4-01695e9e14e9 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2254.540515] env[62684]: INFO nova.compute.manager [req-5d3b0343-f16f-4c17-b3b5-cee13726f8d5 req-00d0575f-82de-4422-8e23-215dc1fe1ae3 service nova] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Neutron deleted interface eea0a099-12ed-4dc1-a3b4-01695e9e14e9; detaching it from the instance and deleting it from the info cache [ 2254.540739] env[62684]: DEBUG nova.network.neutron [req-5d3b0343-f16f-4c17-b3b5-cee13726f8d5 req-00d0575f-82de-4422-8e23-215dc1fe1ae3 service nova] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2254.698291] env[62684]: DEBUG nova.compute.manager [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2254.725339] env[62684]: DEBUG nova.virt.hardware [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2254.725674] env[62684]: DEBUG nova.virt.hardware [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2254.725770] env[62684]: DEBUG nova.virt.hardware [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2254.725962] env[62684]: DEBUG nova.virt.hardware [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2254.726138] env[62684]: DEBUG nova.virt.hardware [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2254.726295] env[62684]: DEBUG nova.virt.hardware [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2254.726510] env[62684]: DEBUG nova.virt.hardware [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2254.726680] env[62684]: DEBUG nova.virt.hardware [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2254.726856] env[62684]: DEBUG 
nova.virt.hardware [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2254.727038] env[62684]: DEBUG nova.virt.hardware [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2254.727232] env[62684]: DEBUG nova.virt.hardware [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2254.728251] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd52b770-332e-4fda-8ae4-b9b20f0af82c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.736585] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c85d25d-c281-4a75-9c68-8010e74580d6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.913622] env[62684]: DEBUG oslo_vmware.api [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053672, 'name': PowerOnVM_Task, 'duration_secs': 1.121456} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2254.916605] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2254.916822] env[62684]: INFO nova.compute.manager [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Took 10.35 seconds to spawn the instance on the hypervisor. [ 2254.917014] env[62684]: DEBUG nova.compute.manager [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2254.917866] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f33873-067b-44d6-810a-f0dd370f1740 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.930142] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053675, 'name': ReconfigVM_Task, 'duration_secs': 0.60333} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2254.930142] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Reconfigured VM instance instance-0000006b to attach disk [datastore2] c87b2875-ae05-4091-93fe-7b33d4ca864b/c87b2875-ae05-4091-93fe-7b33d4ca864b.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2254.930355] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-35a87e08-3654-451d-8df7-da6b104f074a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.938468] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2254.938468] env[62684]: value = "task-2053677" [ 2254.938468] env[62684]: _type = "Task" [ 2254.938468] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2254.947577] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053677, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2254.948422] env[62684]: DEBUG nova.scheduler.client.report [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2255.017809] env[62684]: DEBUG nova.network.neutron [-] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2255.043360] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dafb7001-46b7-40ff-b5ef-a20b7d2fc3f0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.053317] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b03c037-f069-41f2-a002-d344dbcbe74b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.082606] env[62684]: DEBUG nova.compute.manager [req-5d3b0343-f16f-4c17-b3b5-cee13726f8d5 req-00d0575f-82de-4422-8e23-215dc1fe1ae3 service nova] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Detach interface failed, port_id=eea0a099-12ed-4dc1-a3b4-01695e9e14e9, reason: Instance 
0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2255.439134] env[62684]: INFO nova.compute.manager [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Took 20.80 seconds to build instance. [ 2255.449771] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053677, 'name': Rename_Task, 'duration_secs': 0.293201} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2255.450086] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2255.450320] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e887fbdb-b9e8-412f-b91d-6e4f4e10a88c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.452406] env[62684]: DEBUG oslo_concurrency.lockutils [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.760s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2255.462208] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2255.462208] env[62684]: value = "task-2053678" [ 2255.462208] env[62684]: _type = "Task" [ 2255.462208] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2255.472358] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053678, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2255.474547] env[62684]: INFO nova.scheduler.client.report [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Deleted allocations for instance b1f70e39-bf37-4fb8-b95b-653b59bec265 [ 2255.520996] env[62684]: INFO nova.compute.manager [-] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Took 1.30 seconds to deallocate network for instance. 
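The repeated "Invoking *_Task" / "Waiting for the task" / "progress is N%" / "completed successfully" sequences above are the oslo.vmware task-polling pattern: the driver submits a vCenter task and then blocks in VMwareAPISession.wait_for_task() while _poll_task reports progress. A minimal sketch of that pattern follows; the vCenter endpoint, credentials and the way the VM reference is looked up are placeholders for illustration, not values taken from this log.

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder endpoint and credentials -- not values from this log.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'password',
        api_retry_count=10, task_poll_interval=0.5)

    # Grab one VirtualMachine managed-object reference; the log does the
    # equivalent lookups via PropertyCollector.RetrievePropertiesEx and
    # SearchIndex.FindAllByUuid. Assumes at least one VM exists.
    result = session.invoke_api(vim_util, 'get_objects',
                                session.vim, 'VirtualMachine', 1, ['name'])
    vm_ref = result.objects[0].obj

    # Start a vCenter task and block on it. wait_for_task() is what emits the
    # "Waiting for the task ... progress is N% ... completed successfully"
    # sequence seen above.
    task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    task_info = session.wait_for_task(task_ref)
    print(task_info.state)   # 'success' once the task finishes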
[ 2255.944112] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b760cc2e-594f-4b22-a09b-521656a9c55f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "68ed9549-14ab-4f90-bd78-925f289dc029" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.312s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2255.973273] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053678, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2255.982293] env[62684]: DEBUG oslo_concurrency.lockutils [None req-06d23ba8-161b-436d-9074-91fd0e7894ac tempest-ServerRescueNegativeTestJSON-1029344211 tempest-ServerRescueNegativeTestJSON-1029344211-project-member] Lock "b1f70e39-bf37-4fb8-b95b-653b59bec265" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.792s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2256.028546] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2256.028839] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2256.029088] env[62684]: DEBUG nova.objects.instance [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lazy-loading 'resources' on Instance uuid 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2256.473490] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053678, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2256.553082] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "68ed9549-14ab-4f90-bd78-925f289dc029" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2256.553082] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "68ed9549-14ab-4f90-bd78-925f289dc029" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2256.553082] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "68ed9549-14ab-4f90-bd78-925f289dc029-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2256.553472] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "68ed9549-14ab-4f90-bd78-925f289dc029-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2256.553472] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "68ed9549-14ab-4f90-bd78-925f289dc029-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2256.558366] env[62684]: INFO nova.compute.manager [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Terminating instance [ 2256.561151] env[62684]: DEBUG nova.compute.manager [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2256.561369] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2256.562400] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e26095a9-a199-47ba-8d3b-f4860c2af9df {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.575174] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2256.575495] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-875875d6-ac86-4d5e-921a-7dfbafd2fa73 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.583791] env[62684]: DEBUG oslo_vmware.api [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2256.583791] env[62684]: value = "task-2053679" [ 2256.583791] env[62684]: _type = "Task" [ 2256.583791] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2256.596082] env[62684]: DEBUG oslo_vmware.api [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053679, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2256.670153] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdd3faf5-7c71-43c0-8bfa-7f4f76c7a37a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.678827] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a0c43c6-98e2-44f4-bb9f-8513a69108aa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.708934] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19dcee90-3392-4026-8223-dff3739fede1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.717425] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17e11cb8-df51-4d8b-b8c3-0d78eb9dedce {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.733555] env[62684]: DEBUG nova.compute.provider_tree [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2256.974479] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053678, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2257.095274] env[62684]: DEBUG oslo_vmware.api [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053679, 'name': PowerOffVM_Task, 'duration_secs': 0.199646} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2257.095564] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2257.095746] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2257.096016] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-34df3e18-d03a-4005-a8b8-faa23ef2fb15 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.237057] env[62684]: DEBUG nova.scheduler.client.report [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2257.476278] env[62684]: DEBUG oslo_vmware.api [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053678, 'name': PowerOnVM_Task, 'duration_secs': 1.925441} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2257.476278] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2257.476278] env[62684]: INFO nova.compute.manager [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Took 10.61 seconds to spawn the instance on the hypervisor. 
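The "Acquiring lock" / "acquired" / "released" records with their waited/held timings come from oslo.concurrency's lockutils wrapper around the compute manager's critical sections (the per-instance UUID locks, "<uuid>-events", "compute_resources"). A minimal sketch of that locking pattern, assuming the plain lockutils primitives rather than Nova's own synchronized wrapper:

    from oslo_concurrency import lockutils

    # The decorator serializes callers on a named in-process lock and is what
    # produces the acquired/released lines with waited/held timings above.
    # The lock name mirrors the per-instance locks in this log.
    @lockutils.synchronized('c87b2875-ae05-4091-93fe-7b33d4ca864b')
    def do_terminate_instance():
        # critical section: only one greenthread may terminate this instance
        # at a time
        pass

    # The same primitive is available as a context manager, e.g. for the
    # "compute_resources" lock held around ResourceTracker.update_usage.
    with lockutils.lock('compute_resources'):
        pass

    do_terminate_instance()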
[ 2257.476644] env[62684]: DEBUG nova.compute.manager [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2257.477652] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5abfd36-cc22-444c-9ed3-742dd618bec0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.743445] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.714s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2257.777639] env[62684]: INFO nova.scheduler.client.report [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Deleted allocations for instance 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d [ 2257.997074] env[62684]: INFO nova.compute.manager [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Took 19.04 seconds to build instance. [ 2258.280722] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0e140e0-307c-4745-b525-347774b5c78e tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "c87b2875-ae05-4091-93fe-7b33d4ca864b" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2258.290421] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0c1e3eeb-3d45-4ec3-94de-ae332b2760eb tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lock "0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.303s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2258.499512] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bdcd501f-7dc4-4be8-a080-630950e6c9bc tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "c87b2875-ae05-4091-93fe-7b33d4ca864b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.554s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2258.499878] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0e140e0-307c-4745-b525-347774b5c78e tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "c87b2875-ae05-4091-93fe-7b33d4ca864b" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.219s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2258.500090] env[62684]: DEBUG nova.compute.manager [None req-f0e140e0-307c-4745-b525-347774b5c78e 
tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2258.500995] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9aaf6e7-e84b-4345-9bed-8833d8e574ed {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.508621] env[62684]: DEBUG nova.compute.manager [None req-f0e140e0-307c-4745-b525-347774b5c78e tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62684) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 2258.509225] env[62684]: DEBUG nova.objects.instance [None req-f0e140e0-307c-4745-b525-347774b5c78e tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lazy-loading 'flavor' on Instance uuid c87b2875-ae05-4091-93fe-7b33d4ca864b {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2259.015576] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0e140e0-307c-4745-b525-347774b5c78e tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2259.015864] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-30d73dd0-7c0f-482e-ab6d-9e583f314853 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.024500] env[62684]: DEBUG oslo_vmware.api [None req-f0e140e0-307c-4745-b525-347774b5c78e tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2259.024500] env[62684]: value = "task-2053681" [ 2259.024500] env[62684]: _type = "Task" [ 2259.024500] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2259.035446] env[62684]: DEBUG oslo_vmware.api [None req-f0e140e0-307c-4745-b525-347774b5c78e tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053681, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2259.534510] env[62684]: DEBUG oslo_vmware.api [None req-f0e140e0-307c-4745-b525-347774b5c78e tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053681, 'name': PowerOffVM_Task, 'duration_secs': 0.283678} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2259.534843] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0e140e0-307c-4745-b525-347774b5c78e tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2259.534959] env[62684]: DEBUG nova.compute.manager [None req-f0e140e0-307c-4745-b525-347774b5c78e tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2259.535780] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d293315f-a185-40ef-9220-8ead8c46b0ce {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.052423] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0e140e0-307c-4745-b525-347774b5c78e tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "c87b2875-ae05-4091-93fe-7b33d4ca864b" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.552s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2260.683903] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8d28f1fd-4afd-429b-b8d7-13907d227614 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "c87b2875-ae05-4091-93fe-7b33d4ca864b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2260.684050] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8d28f1fd-4afd-429b-b8d7-13907d227614 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "c87b2875-ae05-4091-93fe-7b33d4ca864b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2260.684357] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8d28f1fd-4afd-429b-b8d7-13907d227614 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "c87b2875-ae05-4091-93fe-7b33d4ca864b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2260.684607] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8d28f1fd-4afd-429b-b8d7-13907d227614 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "c87b2875-ae05-4091-93fe-7b33d4ca864b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2260.684821] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8d28f1fd-4afd-429b-b8d7-13907d227614 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] 
Lock "c87b2875-ae05-4091-93fe-7b33d4ca864b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2260.686909] env[62684]: INFO nova.compute.manager [None req-8d28f1fd-4afd-429b-b8d7-13907d227614 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Terminating instance [ 2260.688768] env[62684]: DEBUG nova.compute.manager [None req-8d28f1fd-4afd-429b-b8d7-13907d227614 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2260.688973] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8d28f1fd-4afd-429b-b8d7-13907d227614 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2260.689838] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07cc62f5-607b-4246-82bb-436dc4b672f2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.697843] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8d28f1fd-4afd-429b-b8d7-13907d227614 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2260.698348] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9944b715-bda9-47b2-baaf-ae209f60e315 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.576666] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8d28f1fd-4afd-429b-b8d7-13907d227614 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2261.576923] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8d28f1fd-4afd-429b-b8d7-13907d227614 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2261.577225] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d28f1fd-4afd-429b-b8d7-13907d227614 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Deleting the datastore file [datastore2] c87b2875-ae05-4091-93fe-7b33d4ca864b {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2261.577519] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3b96f4ee-af0e-4179-b0b9-86dc8fc80d40 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.583910] env[62684]: DEBUG 
oslo_vmware.api [None req-8d28f1fd-4afd-429b-b8d7-13907d227614 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2261.583910] env[62684]: value = "task-2053683" [ 2261.583910] env[62684]: _type = "Task" [ 2261.583910] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2261.592561] env[62684]: DEBUG oslo_vmware.api [None req-8d28f1fd-4afd-429b-b8d7-13907d227614 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053683, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2262.094932] env[62684]: DEBUG oslo_vmware.api [None req-8d28f1fd-4afd-429b-b8d7-13907d227614 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053683, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14829} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2262.095217] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d28f1fd-4afd-429b-b8d7-13907d227614 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2262.095405] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8d28f1fd-4afd-429b-b8d7-13907d227614 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2262.095584] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8d28f1fd-4afd-429b-b8d7-13907d227614 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2262.095761] env[62684]: INFO nova.compute.manager [None req-8d28f1fd-4afd-429b-b8d7-13907d227614 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Took 1.41 seconds to destroy the instance on the hypervisor. [ 2262.096007] env[62684]: DEBUG oslo.service.loopingcall [None req-8d28f1fd-4afd-429b-b8d7-13907d227614 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2262.096213] env[62684]: DEBUG nova.compute.manager [-] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2262.096309] env[62684]: DEBUG nova.network.neutron [-] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2262.135586] env[62684]: DEBUG nova.compute.manager [req-8b5084dd-9d6d-4eea-8411-6caf852f4992 req-cb4da457-311b-4971-9477-a1d10050368a service nova] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Received event network-vif-plugged-adc3c1c4-6d99-419c-b176-d3f75d6a908c {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2262.135821] env[62684]: DEBUG oslo_concurrency.lockutils [req-8b5084dd-9d6d-4eea-8411-6caf852f4992 req-cb4da457-311b-4971-9477-a1d10050368a service nova] Acquiring lock "5b3668f3-219d-4304-bc9e-9b911762085d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2262.136055] env[62684]: DEBUG oslo_concurrency.lockutils [req-8b5084dd-9d6d-4eea-8411-6caf852f4992 req-cb4da457-311b-4971-9477-a1d10050368a service nova] Lock "5b3668f3-219d-4304-bc9e-9b911762085d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2262.136233] env[62684]: DEBUG oslo_concurrency.lockutils [req-8b5084dd-9d6d-4eea-8411-6caf852f4992 req-cb4da457-311b-4971-9477-a1d10050368a service nova] Lock "5b3668f3-219d-4304-bc9e-9b911762085d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2262.136407] env[62684]: DEBUG nova.compute.manager [req-8b5084dd-9d6d-4eea-8411-6caf852f4992 req-cb4da457-311b-4971-9477-a1d10050368a service nova] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] No waiting events found dispatching network-vif-plugged-adc3c1c4-6d99-419c-b176-d3f75d6a908c {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2262.136572] env[62684]: WARNING nova.compute.manager [req-8b5084dd-9d6d-4eea-8411-6caf852f4992 req-cb4da457-311b-4971-9477-a1d10050368a service nova] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Received unexpected event network-vif-plugged-adc3c1c4-6d99-419c-b176-d3f75d6a908c for instance with vm_state building and task_state spawning. 
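The three records just above show the external-event handshake for instance 5b3668f3: the neutron-originated event network-vif-plugged-adc3c1c4-6d99-419c-b176-d3f75d6a908c arrives, the compute manager pops the matching waiter under the per-instance "-events" lock, finds that the spawn path has not registered one yet, and logs the event as unexpected. Below is a minimal stand-in for that register/dispatch pattern using plain threading.Event bookkeeping; it is an illustrative sketch only, not Nova's actual InstanceEvents code.

    import threading
    from collections import defaultdict

    # Simplified stand-in for the pop_instance_event flow logged above: waiters
    # are registered per (instance, event tag); dispatching an event with no
    # registered waiter is reported as "unexpected", mirroring the WARNING record.
    _waiters = defaultdict(dict)          # instance uuid -> {event tag: Event}
    _lock = threading.Lock()              # cf. the per-instance "-events" lock

    def prepare_for_event(instance_uuid, tag):
        with _lock:
            event = threading.Event()
            _waiters[instance_uuid][tag] = event
            return event

    def dispatch_event(instance_uuid, tag):
        with _lock:
            event = _waiters[instance_uuid].pop(tag, None)
        if event is None:
            print(f"unexpected event {tag} for instance {instance_uuid}")
        else:
            event.set()

    waiter = prepare_for_event("5b3668f3", "network-vif-plugged-adc3c1c4")
    dispatch_event("5b3668f3", "network-vif-plugged-adc3c1c4")   # wakes the waiter
    print(waiter.is_set())                                       # True
    dispatch_event("5b3668f3", "network-vif-plugged-ffffffff")   # no waiter -> "unexpected"
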
[ 2262.617863] env[62684]: DEBUG nova.network.neutron [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Successfully updated port: adc3c1c4-6d99-419c-b176-d3f75d6a908c {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2263.120304] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "refresh_cache-5b3668f3-219d-4304-bc9e-9b911762085d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2263.120465] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquired lock "refresh_cache-5b3668f3-219d-4304-bc9e-9b911762085d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2263.122777] env[62684]: DEBUG nova.network.neutron [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2263.227070] env[62684]: DEBUG nova.network.neutron [-] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2263.384652] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2263.384910] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2263.385280] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Deleting the datastore file [datastore1] 68ed9549-14ab-4f90-bd78-925f289dc029 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2263.385809] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a5491657-ef7f-4ff9-a5f1-9b0812b4bd19 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.393407] env[62684]: DEBUG oslo_vmware.api [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for the task: (returnval){ [ 2263.393407] env[62684]: value = "task-2053684" [ 2263.393407] env[62684]: _type = "Task" [ 2263.393407] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2263.401771] env[62684]: DEBUG oslo_vmware.api [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053684, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2263.656928] env[62684]: DEBUG nova.network.neutron [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2263.732578] env[62684]: INFO nova.compute.manager [-] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Took 1.64 seconds to deallocate network for instance. [ 2263.855724] env[62684]: DEBUG nova.network.neutron [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Updating instance_info_cache with network_info: [{"id": "adc3c1c4-6d99-419c-b176-d3f75d6a908c", "address": "fa:16:3e:8b:01:89", "network": {"id": "8136a664-f757-43b3-a2fa-bacdf2e9566c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1799567463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947e7359aaba456fa1763f4dc8e9d359", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cc30a16-f070-421c-964e-50c9aa32f17a", "external-id": "nsx-vlan-transportzone-424", "segmentation_id": 424, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapadc3c1c4-6d", "ovs_interfaceid": "adc3c1c4-6d99-419c-b176-d3f75d6a908c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2263.903862] env[62684]: DEBUG oslo_vmware.api [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Task: {'id': task-2053684, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143468} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2263.904144] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2263.904333] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2263.904511] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2263.904684] env[62684]: INFO nova.compute.manager [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Took 7.34 seconds to destroy the instance on the hypervisor. [ 2263.904956] env[62684]: DEBUG oslo.service.loopingcall [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2263.905129] env[62684]: DEBUG nova.compute.manager [-] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2263.905220] env[62684]: DEBUG nova.network.neutron [-] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2264.177393] env[62684]: DEBUG nova.compute.manager [req-4d1344d9-6f42-4d57-88a5-8d2b79527ea0 req-2ccc961e-6c20-4a33-a7b0-3f0371d81015 service nova] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Received event network-changed-adc3c1c4-6d99-419c-b176-d3f75d6a908c {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2264.177664] env[62684]: DEBUG nova.compute.manager [req-4d1344d9-6f42-4d57-88a5-8d2b79527ea0 req-2ccc961e-6c20-4a33-a7b0-3f0371d81015 service nova] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Refreshing instance network info cache due to event network-changed-adc3c1c4-6d99-419c-b176-d3f75d6a908c. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2264.177909] env[62684]: DEBUG oslo_concurrency.lockutils [req-4d1344d9-6f42-4d57-88a5-8d2b79527ea0 req-2ccc961e-6c20-4a33-a7b0-3f0371d81015 service nova] Acquiring lock "refresh_cache-5b3668f3-219d-4304-bc9e-9b911762085d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2264.241435] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8d28f1fd-4afd-429b-b8d7-13907d227614 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2264.241686] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8d28f1fd-4afd-429b-b8d7-13907d227614 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2264.241923] env[62684]: DEBUG nova.objects.instance [None req-8d28f1fd-4afd-429b-b8d7-13907d227614 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lazy-loading 'resources' on Instance uuid c87b2875-ae05-4091-93fe-7b33d4ca864b {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2264.357822] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Releasing lock "refresh_cache-5b3668f3-219d-4304-bc9e-9b911762085d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2264.358165] env[62684]: DEBUG nova.compute.manager [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Instance network_info: |[{"id": "adc3c1c4-6d99-419c-b176-d3f75d6a908c", "address": "fa:16:3e:8b:01:89", "network": {"id": "8136a664-f757-43b3-a2fa-bacdf2e9566c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1799567463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947e7359aaba456fa1763f4dc8e9d359", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cc30a16-f070-421c-964e-50c9aa32f17a", "external-id": "nsx-vlan-transportzone-424", "segmentation_id": 424, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapadc3c1c4-6d", "ovs_interfaceid": "adc3c1c4-6d99-419c-b176-d3f75d6a908c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2264.358495] env[62684]: 
DEBUG oslo_concurrency.lockutils [req-4d1344d9-6f42-4d57-88a5-8d2b79527ea0 req-2ccc961e-6c20-4a33-a7b0-3f0371d81015 service nova] Acquired lock "refresh_cache-5b3668f3-219d-4304-bc9e-9b911762085d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2264.358683] env[62684]: DEBUG nova.network.neutron [req-4d1344d9-6f42-4d57-88a5-8d2b79527ea0 req-2ccc961e-6c20-4a33-a7b0-3f0371d81015 service nova] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Refreshing network info cache for port adc3c1c4-6d99-419c-b176-d3f75d6a908c {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2264.359933] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:01:89', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cc30a16-f070-421c-964e-50c9aa32f17a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'adc3c1c4-6d99-419c-b176-d3f75d6a908c', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2264.367436] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Creating folder: Project (947e7359aaba456fa1763f4dc8e9d359). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2264.368385] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b2b2d1df-44a4-4c81-a3d0-f9b49c04ca17 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.382440] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Created folder: Project (947e7359aaba456fa1763f4dc8e9d359) in parent group-v421118. [ 2264.382643] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Creating folder: Instances. Parent ref: group-v421399. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2264.382952] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b477965b-bd63-4585-9f51-4ca2cad8063b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.392341] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Created folder: Instances in parent group-v421399. [ 2264.392578] env[62684]: DEBUG oslo.service.loopingcall [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2264.392775] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2264.392991] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5554151f-9cfd-4e91-83ac-2431ee885920 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.412165] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2264.412165] env[62684]: value = "task-2053687" [ 2264.412165] env[62684]: _type = "Task" [ 2264.412165] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2264.419997] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053687, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2264.828594] env[62684]: DEBUG nova.network.neutron [-] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2264.854978] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2616ddb1-f9e8-4478-818a-212107bcda30 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.863459] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b1286d5-45ad-4420-b1c3-1a4a6c6d9b24 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.898797] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d41a7f39-d156-440c-bcf4-49786aa496ca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.907255] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66b98b5f-c290-455a-98ba-aa4d69f25870 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.924052] env[62684]: DEBUG nova.compute.provider_tree [None req-8d28f1fd-4afd-429b-b8d7-13907d227614 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2264.930485] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053687, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2265.211357] env[62684]: DEBUG nova.network.neutron [req-4d1344d9-6f42-4d57-88a5-8d2b79527ea0 req-2ccc961e-6c20-4a33-a7b0-3f0371d81015 service nova] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Updated VIF entry in instance network info cache for port adc3c1c4-6d99-419c-b176-d3f75d6a908c. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2265.211757] env[62684]: DEBUG nova.network.neutron [req-4d1344d9-6f42-4d57-88a5-8d2b79527ea0 req-2ccc961e-6c20-4a33-a7b0-3f0371d81015 service nova] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Updating instance_info_cache with network_info: [{"id": "adc3c1c4-6d99-419c-b176-d3f75d6a908c", "address": "fa:16:3e:8b:01:89", "network": {"id": "8136a664-f757-43b3-a2fa-bacdf2e9566c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1799567463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947e7359aaba456fa1763f4dc8e9d359", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cc30a16-f070-421c-964e-50c9aa32f17a", "external-id": "nsx-vlan-transportzone-424", "segmentation_id": 424, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapadc3c1c4-6d", "ovs_interfaceid": "adc3c1c4-6d99-419c-b176-d3f75d6a908c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2265.333632] env[62684]: INFO nova.compute.manager [-] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Took 1.43 seconds to deallocate network for instance. [ 2265.422760] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053687, 'name': CreateVM_Task, 'duration_secs': 0.836161} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2265.422958] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2265.423609] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2265.423784] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2265.424170] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2265.424492] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d84d82b7-8917-4507-970a-7c8231e9b02a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.429423] env[62684]: DEBUG oslo_vmware.api [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2265.429423] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5274e299-4118-d037-2c41-7bea9b6febcf" [ 2265.429423] env[62684]: _type = "Task" [ 2265.429423] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2265.433033] env[62684]: DEBUG nova.scheduler.client.report [None req-8d28f1fd-4afd-429b-b8d7-13907d227614 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2265.441498] env[62684]: DEBUG oslo_vmware.api [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5274e299-4118-d037-2c41-7bea9b6febcf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2265.714375] env[62684]: DEBUG oslo_concurrency.lockutils [req-4d1344d9-6f42-4d57-88a5-8d2b79527ea0 req-2ccc961e-6c20-4a33-a7b0-3f0371d81015 service nova] Releasing lock "refresh_cache-5b3668f3-219d-4304-bc9e-9b911762085d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2265.714728] env[62684]: DEBUG nova.compute.manager [req-4d1344d9-6f42-4d57-88a5-8d2b79527ea0 req-2ccc961e-6c20-4a33-a7b0-3f0371d81015 service nova] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Received event network-vif-deleted-e2e433b0-9c1a-4fe6-bc77-e1e5416b042a {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2265.839638] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2265.899417] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2265.899659] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2265.899873] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2265.940490] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8d28f1fd-4afd-429b-b8d7-13907d227614 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.699s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2265.942576] env[62684]: DEBUG oslo_vmware.api [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5274e299-4118-d037-2c41-7bea9b6febcf, 'name': SearchDatastore_Task, 'duration_secs': 0.012615} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2265.943052] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.104s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2265.943296] env[62684]: DEBUG nova.objects.instance [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lazy-loading 'resources' on Instance uuid 68ed9549-14ab-4f90-bd78-925f289dc029 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2265.944327] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2265.944559] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2265.944794] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2265.944948] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2265.945146] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2265.945625] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-02c7b90a-38ef-494b-a5f7-dabace0c6a53 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.956059] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
2265.956251] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2265.956989] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ef7e4f9-ed70-4bcd-bca0-a81744e8e6b4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.963063] env[62684]: DEBUG oslo_vmware.api [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2265.963063] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5239779c-17a2-a591-ee9f-71730bd8dca9" [ 2265.963063] env[62684]: _type = "Task" [ 2265.963063] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2265.963956] env[62684]: INFO nova.scheduler.client.report [None req-8d28f1fd-4afd-429b-b8d7-13907d227614 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Deleted allocations for instance c87b2875-ae05-4091-93fe-7b33d4ca864b [ 2265.976034] env[62684]: DEBUG oslo_vmware.api [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5239779c-17a2-a591-ee9f-71730bd8dca9, 'name': SearchDatastore_Task, 'duration_secs': 0.010161} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2265.976793] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2faefe0-52a4-4f27-b102-f0664ade289a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.982471] env[62684]: DEBUG oslo_vmware.api [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2265.982471] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527573df-b9b1-8da5-00d8-0f4c9de82484" [ 2265.982471] env[62684]: _type = "Task" [ 2265.982471] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2265.992829] env[62684]: DEBUG oslo_vmware.api [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527573df-b9b1-8da5-00d8-0f4c9de82484, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2266.239210] env[62684]: DEBUG nova.compute.manager [req-31303856-ed9a-42e8-941b-b685ad0ede7c req-a97a8998-068f-4567-90f2-6bdf0605f6d9 service nova] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Received event network-vif-deleted-22389f73-0c07-4171-893b-941798deca07 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2266.473804] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8d28f1fd-4afd-429b-b8d7-13907d227614 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "c87b2875-ae05-4091-93fe-7b33d4ca864b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.790s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2266.494387] env[62684]: DEBUG oslo_vmware.api [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527573df-b9b1-8da5-00d8-0f4c9de82484, 'name': SearchDatastore_Task, 'duration_secs': 0.010688} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2266.494679] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2266.494905] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 5b3668f3-219d-4304-bc9e-9b911762085d/5b3668f3-219d-4304-bc9e-9b911762085d.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2266.495562] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5c8fdf58-439b-4b91-b88b-3565b6174192 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.507059] env[62684]: DEBUG oslo_vmware.api [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2266.507059] env[62684]: value = "task-2053688" [ 2266.507059] env[62684]: _type = "Task" [ 2266.507059] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2266.515777] env[62684]: DEBUG oslo_vmware.api [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053688, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2266.553148] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f64f78be-ad43-4a9c-a198-2c33a1128976 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.562071] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0112caad-b246-46bc-a141-2ca737d486a4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.592734] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17e26df-7eee-4fd3-ac2b-27258ac57bd6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.601348] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61f171b-dbb9-4610-9903-46a49de44756 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.617288] env[62684]: DEBUG nova.compute.provider_tree [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2266.935615] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04eaabb6-c35f-4896-bc66-f57c735e6d50 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "587edf89-2ea0-4b89-8830-fa766b798398" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2266.935976] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04eaabb6-c35f-4896-bc66-f57c735e6d50 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "587edf89-2ea0-4b89-8830-fa766b798398" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2267.017947] env[62684]: DEBUG oslo_vmware.api [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053688, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.446235} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2267.018356] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 5b3668f3-219d-4304-bc9e-9b911762085d/5b3668f3-219d-4304-bc9e-9b911762085d.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2267.018605] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2267.018864] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-57a63e4d-9202-42e8-a1e7-1cd8c34a0efa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.026455] env[62684]: DEBUG oslo_vmware.api [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2267.026455] env[62684]: value = "task-2053689" [ 2267.026455] env[62684]: _type = "Task" [ 2267.026455] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2267.034635] env[62684]: DEBUG oslo_vmware.api [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053689, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2267.120251] env[62684]: DEBUG nova.scheduler.client.report [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2267.439180] env[62684]: DEBUG nova.compute.utils [None req-04eaabb6-c35f-4896-bc66-f57c735e6d50 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2267.453537] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "refresh_cache-57537508-06e7-43a4-95c5-c4399b8bf93f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2267.453690] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "refresh_cache-57537508-06e7-43a4-95c5-c4399b8bf93f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2267.453839] env[62684]: DEBUG nova.network.neutron [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Forcefully refreshing network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2267.537142] env[62684]: DEBUG oslo_vmware.api [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053689, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067638} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2267.537420] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2267.538252] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-897b674c-9b42-4a1f-90a8-f550ea814651 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.561186] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] 5b3668f3-219d-4304-bc9e-9b911762085d/5b3668f3-219d-4304-bc9e-9b911762085d.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2267.561510] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d940891-8923-4ef1-ae87-4e5aa605be64 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.582550] env[62684]: DEBUG oslo_vmware.api [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2267.582550] env[62684]: value = "task-2053690" [ 2267.582550] env[62684]: _type = "Task" [ 2267.582550] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2267.590860] env[62684]: DEBUG oslo_vmware.api [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053690, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2267.625436] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.682s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2267.642942] env[62684]: INFO nova.scheduler.client.report [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Deleted allocations for instance 68ed9549-14ab-4f90-bd78-925f289dc029 [ 2267.942590] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04eaabb6-c35f-4896-bc66-f57c735e6d50 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "587edf89-2ea0-4b89-8830-fa766b798398" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2268.002140] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "b262673b-e4d3-48d8-9f93-6c60d48ae29d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2268.002374] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "b262673b-e4d3-48d8-9f93-6c60d48ae29d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2268.093679] env[62684]: DEBUG oslo_vmware.api [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053690, 'name': ReconfigVM_Task, 'duration_secs': 0.289281} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2268.093977] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Reconfigured VM instance instance-0000006c to attach disk [datastore1] 5b3668f3-219d-4304-bc9e-9b911762085d/5b3668f3-219d-4304-bc9e-9b911762085d.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2268.094650] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7c56a9fb-948c-4c92-a989-a88ed5bae32f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.101974] env[62684]: DEBUG oslo_vmware.api [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2268.101974] env[62684]: value = "task-2053691" [ 2268.101974] env[62684]: _type = "Task" [ 2268.101974] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2268.109955] env[62684]: DEBUG oslo_vmware.api [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053691, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2268.150152] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83ea7901-78fb-433f-b59b-13a8fd2e789f tempest-ServerDiskConfigTestJSON-2081969978 tempest-ServerDiskConfigTestJSON-2081969978-project-member] Lock "68ed9549-14ab-4f90-bd78-925f289dc029" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.597s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2268.505589] env[62684]: DEBUG nova.compute.manager [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2268.611892] env[62684]: DEBUG oslo_vmware.api [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053691, 'name': Rename_Task, 'duration_secs': 0.177307} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2268.612222] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2268.612467] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-06acada7-0cec-4715-b9a6-d858c07d5326 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.621941] env[62684]: DEBUG oslo_vmware.api [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2268.621941] env[62684]: value = "task-2053692" [ 2268.621941] env[62684]: _type = "Task" [ 2268.621941] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2268.630192] env[62684]: DEBUG oslo_vmware.api [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053692, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2268.786719] env[62684]: DEBUG nova.network.neutron [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Updating instance_info_cache with network_info: [{"id": "0a96e2ce-2335-44e2-940d-26d3afbafa3a", "address": "fa:16:3e:6d:b8:02", "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-120070593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "263c101fcc5e493789b79dfd1ba97cc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a96e2ce-23", "ovs_interfaceid": "0a96e2ce-2335-44e2-940d-26d3afbafa3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2269.014903] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04eaabb6-c35f-4896-bc66-f57c735e6d50 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "587edf89-2ea0-4b89-8830-fa766b798398" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} 
[ 2269.015447] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04eaabb6-c35f-4896-bc66-f57c735e6d50 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "587edf89-2ea0-4b89-8830-fa766b798398" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2269.015532] env[62684]: INFO nova.compute.manager [None req-04eaabb6-c35f-4896-bc66-f57c735e6d50 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Attaching volume bd2d9701-64ca-4c51-b6f2-382756dbec34 to /dev/sdb [ 2269.034396] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2269.034586] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2269.036207] env[62684]: INFO nova.compute.claims [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2269.050630] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-258882ce-eb61-44f3-904f-124ff98b015c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.060886] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf66165-5057-41ef-ac6f-7b68c82a5326 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.077771] env[62684]: DEBUG nova.virt.block_device [None req-04eaabb6-c35f-4896-bc66-f57c735e6d50 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Updating existing volume attachment record: a205a906-3626-46f2-ae94-56cb3e79b688 {{(pid=62684) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2269.132918] env[62684]: DEBUG oslo_vmware.api [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053692, 'name': PowerOnVM_Task, 'duration_secs': 0.488121} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2269.132918] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2269.132918] env[62684]: INFO nova.compute.manager [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Took 14.43 seconds to spawn the instance on the hypervisor. [ 2269.132918] env[62684]: DEBUG nova.compute.manager [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2269.133776] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff9a0594-65b4-4bff-9b81-0d4162137b5c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.290655] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "refresh_cache-57537508-06e7-43a4-95c5-c4399b8bf93f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2269.290882] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Updated the network info_cache for instance {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2269.292703] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2269.292703] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2269.292703] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2269.292883] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2269.292920] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2269.293448] env[62684]: DEBUG oslo_service.periodic_task [None 
req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2269.293448] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2269.293448] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2269.651958] env[62684]: INFO nova.compute.manager [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Took 19.25 seconds to build instance. [ 2269.796514] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2270.154172] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cf0e5de2-d224-47fa-bd25-2616e91b1e22 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "5b3668f3-219d-4304-bc9e-9b911762085d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.778s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2270.172172] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad96d060-f0f3-4189-b595-54f4abe77f72 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.181651] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70d859f9-bae6-4e33-a9c7-96027e3df68c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.216946] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d95f023-9cba-4ba8-8f21-2ced5c35e4cf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.225843] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3605ea1c-8298-45b0-801c-b2bb6e03b634 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.239955] env[62684]: DEBUG nova.compute.provider_tree [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2270.243009] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 
tempest-ServersNegativeTestJSON-1876234319-project-member] Acquiring lock "9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2270.243248] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lock "9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2270.243425] env[62684]: INFO nova.compute.manager [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Shelving [ 2270.538828] env[62684]: DEBUG nova.compute.manager [req-967e2a6b-1c72-44d6-a138-0ae59f8e267e req-7a64eceb-9f44-45c7-8b3d-9eb0b8e208c6 service nova] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Received event network-changed-adc3c1c4-6d99-419c-b176-d3f75d6a908c {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2270.539055] env[62684]: DEBUG nova.compute.manager [req-967e2a6b-1c72-44d6-a138-0ae59f8e267e req-7a64eceb-9f44-45c7-8b3d-9eb0b8e208c6 service nova] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Refreshing instance network info cache due to event network-changed-adc3c1c4-6d99-419c-b176-d3f75d6a908c. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2270.539275] env[62684]: DEBUG oslo_concurrency.lockutils [req-967e2a6b-1c72-44d6-a138-0ae59f8e267e req-7a64eceb-9f44-45c7-8b3d-9eb0b8e208c6 service nova] Acquiring lock "refresh_cache-5b3668f3-219d-4304-bc9e-9b911762085d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2270.539420] env[62684]: DEBUG oslo_concurrency.lockutils [req-967e2a6b-1c72-44d6-a138-0ae59f8e267e req-7a64eceb-9f44-45c7-8b3d-9eb0b8e208c6 service nova] Acquired lock "refresh_cache-5b3668f3-219d-4304-bc9e-9b911762085d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2270.539582] env[62684]: DEBUG nova.network.neutron [req-967e2a6b-1c72-44d6-a138-0ae59f8e267e req-7a64eceb-9f44-45c7-8b3d-9eb0b8e208c6 service nova] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Refreshing network info cache for port adc3c1c4-6d99-419c-b176-d3f75d6a908c {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2270.747019] env[62684]: DEBUG nova.scheduler.client.report [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2270.752888] 
env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2270.753184] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f6ce8336-0e68-4257-8b62-0173b85b003b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.765674] env[62684]: DEBUG oslo_vmware.api [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2270.765674] env[62684]: value = "task-2053696" [ 2270.765674] env[62684]: _type = "Task" [ 2270.765674] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2270.774624] env[62684]: DEBUG oslo_vmware.api [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053696, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2271.251650] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.217s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2271.252455] env[62684]: DEBUG nova.compute.manager [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2271.254933] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.459s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2271.255132] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2271.255282] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2271.256327] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d8c218a-9acc-46f3-ba07-c275cfe27147 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.264917] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-754e5947-d842-45ce-b0ec-c27db9c3668d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.278079] env[62684]: DEBUG oslo_vmware.api [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053696, 'name': PowerOffVM_Task, 'duration_secs': 0.391179} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2271.286048] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2271.287188] env[62684]: DEBUG nova.network.neutron [req-967e2a6b-1c72-44d6-a138-0ae59f8e267e req-7a64eceb-9f44-45c7-8b3d-9eb0b8e208c6 service nova] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Updated VIF entry in instance network info cache for port adc3c1c4-6d99-419c-b176-d3f75d6a908c. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2271.287540] env[62684]: DEBUG nova.network.neutron [req-967e2a6b-1c72-44d6-a138-0ae59f8e267e req-7a64eceb-9f44-45c7-8b3d-9eb0b8e208c6 service nova] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Updating instance_info_cache with network_info: [{"id": "adc3c1c4-6d99-419c-b176-d3f75d6a908c", "address": "fa:16:3e:8b:01:89", "network": {"id": "8136a664-f757-43b3-a2fa-bacdf2e9566c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1799567463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947e7359aaba456fa1763f4dc8e9d359", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cc30a16-f070-421c-964e-50c9aa32f17a", "external-id": "nsx-vlan-transportzone-424", "segmentation_id": 424, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapadc3c1c4-6d", "ovs_interfaceid": "adc3c1c4-6d99-419c-b176-d3f75d6a908c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2271.289310] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65de6042-35bf-41ca-b827-9bdd105ccf85 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.292707] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dfe880a-8ded-4cc3-8ccb-f6feaed1ae64 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.315085] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03859974-822b-4ffa-ae50-9d04f5d818ed {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.319038] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4422d66-7bcf-49a7-818a-45e112f04523 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.353099] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179389MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2271.353289] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2271.353449] env[62684]: DEBUG oslo_concurrency.lockutils 
[None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2271.757759] env[62684]: DEBUG nova.compute.utils [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2271.759779] env[62684]: DEBUG nova.compute.manager [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2271.759779] env[62684]: DEBUG nova.network.neutron [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2271.797428] env[62684]: DEBUG oslo_concurrency.lockutils [req-967e2a6b-1c72-44d6-a138-0ae59f8e267e req-7a64eceb-9f44-45c7-8b3d-9eb0b8e208c6 service nova] Releasing lock "refresh_cache-5b3668f3-219d-4304-bc9e-9b911762085d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2271.847894] env[62684]: DEBUG nova.policy [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '96b96927115d49f2a04342784717e58e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '607a0aa1049640d882d7dd490f5f98ea', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2271.857093] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Creating Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2271.861386] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0e96b84c-758e-4b64-82a0-a7fd0ab52859 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.870236] env[62684]: DEBUG oslo_vmware.api [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2271.870236] env[62684]: value = "task-2053698" [ 2271.870236] env[62684]: _type = "Task" [ 2271.870236] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2271.880635] env[62684]: DEBUG oslo_vmware.api [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053698, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2272.263105] env[62684]: DEBUG nova.compute.manager [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2272.381490] env[62684]: DEBUG oslo_vmware.api [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053698, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2272.387792] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 57537508-06e7-43a4-95c5-c4399b8bf93f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2272.387959] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 0156d807-1ab4-482f-91d1-172bf32bf23c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2272.388142] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance ba12fa9a-10e3-4624-98b5-4ff7365e1940 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2272.388265] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 587edf89-2ea0-4b89-8830-fa766b798398 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2272.388396] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2272.388560] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 5b3668f3-219d-4304-bc9e-9b911762085d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2272.388676] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance b262673b-e4d3-48d8-9f93-6c60d48ae29d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2272.388877] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2272.389065] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2272.495677] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2818548-c5f1-4cdf-bf2c-c34a70e235bc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2272.504432] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abc6d0a3-2b71-453f-a26e-64779b6dc5f0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2272.542425] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4eb00c7-9b19-48cb-b76b-760efdac703b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2272.552248] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0175447-1039-4907-924a-59fb8ade601a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2272.567112] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2272.601956] env[62684]: DEBUG nova.network.neutron [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Successfully created port: 38cf4164-d6e1-4c83-a587-36f2e19b13e3 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2272.881201] env[62684]: DEBUG oslo_vmware.api [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053698, 'name': CreateSnapshot_Task, 'duration_secs': 0.691981} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2272.881522] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Created Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2272.882477] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53a91151-f445-4ef6-abaa-df8a6d8f2fa7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.070214] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2273.275985] env[62684]: DEBUG nova.compute.manager [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2273.304718] env[62684]: DEBUG nova.virt.hardware [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2273.305435] env[62684]: DEBUG nova.virt.hardware [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2273.305435] env[62684]: DEBUG nova.virt.hardware [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2273.305435] env[62684]: DEBUG nova.virt.hardware [None 
req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2273.305533] env[62684]: DEBUG nova.virt.hardware [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2273.305635] env[62684]: DEBUG nova.virt.hardware [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2273.307290] env[62684]: DEBUG nova.virt.hardware [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2273.307555] env[62684]: DEBUG nova.virt.hardware [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2273.307758] env[62684]: DEBUG nova.virt.hardware [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2273.307945] env[62684]: DEBUG nova.virt.hardware [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2273.308144] env[62684]: DEBUG nova.virt.hardware [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2273.313053] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01dcf0ef-d7de-4341-99ae-9b8d96262179 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.322408] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1463e635-cd67-4238-961c-2de76eb0b4c4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.401427] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Creating linked-clone VM from snapshot {{(pid=62684) _create_linked_clone_from_snapshot 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2273.401760] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-79b140cd-c775-46f3-863a-83466cb53a98 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.412167] env[62684]: DEBUG oslo_vmware.api [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2273.412167] env[62684]: value = "task-2053699" [ 2273.412167] env[62684]: _type = "Task" [ 2273.412167] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2273.421020] env[62684]: DEBUG oslo_vmware.api [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053699, 'name': CloneVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2273.574836] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2273.575070] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.222s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2273.924354] env[62684]: DEBUG oslo_vmware.api [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053699, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2274.127218] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-04eaabb6-c35f-4896-bc66-f57c735e6d50 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Volume attach. 
Driver type: vmdk {{(pid=62684) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2274.127487] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-04eaabb6-c35f-4896-bc66-f57c735e6d50 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421403', 'volume_id': 'bd2d9701-64ca-4c51-b6f2-382756dbec34', 'name': 'volume-bd2d9701-64ca-4c51-b6f2-382756dbec34', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '587edf89-2ea0-4b89-8830-fa766b798398', 'attached_at': '', 'detached_at': '', 'volume_id': 'bd2d9701-64ca-4c51-b6f2-382756dbec34', 'serial': 'bd2d9701-64ca-4c51-b6f2-382756dbec34'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2274.128622] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ac95284-c065-489d-acfa-b30e748fc246 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.151147] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1633998b-8469-4d1d-88cd-315cfd2c6048 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.181101] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-04eaabb6-c35f-4896-bc66-f57c735e6d50 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] volume-bd2d9701-64ca-4c51-b6f2-382756dbec34/volume-bd2d9701-64ca-4c51-b6f2-382756dbec34.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2274.181443] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f8f6380-7c30-49d1-9dea-b99ef214e380 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.207105] env[62684]: DEBUG oslo_vmware.api [None req-04eaabb6-c35f-4896-bc66-f57c735e6d50 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2274.207105] env[62684]: value = "task-2053700" [ 2274.207105] env[62684]: _type = "Task" [ 2274.207105] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2274.218876] env[62684]: DEBUG oslo_vmware.api [None req-04eaabb6-c35f-4896-bc66-f57c735e6d50 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053700, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2274.421109] env[62684]: DEBUG nova.compute.manager [req-1d977bb2-3b46-4d76-ab0e-58dee173f7b2 req-a4db7278-fffa-47d7-a961-622204350028 service nova] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Received event network-vif-plugged-38cf4164-d6e1-4c83-a587-36f2e19b13e3 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2274.421109] env[62684]: DEBUG oslo_concurrency.lockutils [req-1d977bb2-3b46-4d76-ab0e-58dee173f7b2 req-a4db7278-fffa-47d7-a961-622204350028 service nova] Acquiring lock "b262673b-e4d3-48d8-9f93-6c60d48ae29d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2274.421981] env[62684]: DEBUG oslo_concurrency.lockutils [req-1d977bb2-3b46-4d76-ab0e-58dee173f7b2 req-a4db7278-fffa-47d7-a961-622204350028 service nova] Lock "b262673b-e4d3-48d8-9f93-6c60d48ae29d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2274.422328] env[62684]: DEBUG oslo_concurrency.lockutils [req-1d977bb2-3b46-4d76-ab0e-58dee173f7b2 req-a4db7278-fffa-47d7-a961-622204350028 service nova] Lock "b262673b-e4d3-48d8-9f93-6c60d48ae29d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2274.422652] env[62684]: DEBUG nova.compute.manager [req-1d977bb2-3b46-4d76-ab0e-58dee173f7b2 req-a4db7278-fffa-47d7-a961-622204350028 service nova] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] No waiting events found dispatching network-vif-plugged-38cf4164-d6e1-4c83-a587-36f2e19b13e3 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2274.422963] env[62684]: WARNING nova.compute.manager [req-1d977bb2-3b46-4d76-ab0e-58dee173f7b2 req-a4db7278-fffa-47d7-a961-622204350028 service nova] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Received unexpected event network-vif-plugged-38cf4164-d6e1-4c83-a587-36f2e19b13e3 for instance with vm_state building and task_state spawning. [ 2274.436965] env[62684]: DEBUG oslo_vmware.api [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053699, 'name': CloneVM_Task} progress is 95%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2274.522684] env[62684]: DEBUG nova.network.neutron [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Successfully updated port: 38cf4164-d6e1-4c83-a587-36f2e19b13e3 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2274.717968] env[62684]: DEBUG oslo_vmware.api [None req-04eaabb6-c35f-4896-bc66-f57c735e6d50 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053700, 'name': ReconfigVM_Task, 'duration_secs': 0.403651} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2274.718298] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-04eaabb6-c35f-4896-bc66-f57c735e6d50 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Reconfigured VM instance instance-00000067 to attach disk [datastore1] volume-bd2d9701-64ca-4c51-b6f2-382756dbec34/volume-bd2d9701-64ca-4c51-b6f2-382756dbec34.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2274.722932] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7c3a857-ec0d-459d-947e-88dced6a2b92 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.739730] env[62684]: DEBUG oslo_vmware.api [None req-04eaabb6-c35f-4896-bc66-f57c735e6d50 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2274.739730] env[62684]: value = "task-2053701" [ 2274.739730] env[62684]: _type = "Task" [ 2274.739730] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2274.748541] env[62684]: DEBUG oslo_vmware.api [None req-04eaabb6-c35f-4896-bc66-f57c735e6d50 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053701, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2274.924342] env[62684]: DEBUG oslo_vmware.api [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053699, 'name': CloneVM_Task, 'duration_secs': 1.153058} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2274.924651] env[62684]: INFO nova.virt.vmwareapi.vmops [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Created linked-clone VM from snapshot [ 2274.925418] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cccb63da-65e3-4b76-afc4-40ebd47a339f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.934261] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Uploading image a08ef668-cfc2-427d-9c76-1c1bbb64d819 {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2274.951199] env[62684]: DEBUG oslo_concurrency.lockutils [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Acquiring lock "a9965f71-e965-4144-a64a-6ee43ad20fc0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2274.951437] env[62684]: DEBUG oslo_concurrency.lockutils [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Lock "a9965f71-e965-4144-a64a-6ee43ad20fc0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2274.961079] env[62684]: DEBUG oslo_vmware.rw_handles [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2274.961079] env[62684]: value = "vm-421405" [ 2274.961079] env[62684]: _type = "VirtualMachine" [ 2274.961079] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2274.961345] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e3fdeec7-2dce-4548-8a57-35ba90096fdf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.970283] env[62684]: DEBUG oslo_vmware.rw_handles [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lease: (returnval){ [ 2274.970283] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5281a955-4543-0d51-9018-c001580efff7" [ 2274.970283] env[62684]: _type = "HttpNfcLease" [ 2274.970283] env[62684]: } obtained for exporting VM: (result){ [ 2274.970283] env[62684]: value = "vm-421405" [ 2274.970283] env[62684]: _type = "VirtualMachine" [ 2274.970283] env[62684]: }. 
{{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2274.970533] env[62684]: DEBUG oslo_vmware.api [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the lease: (returnval){ [ 2274.970533] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5281a955-4543-0d51-9018-c001580efff7" [ 2274.970533] env[62684]: _type = "HttpNfcLease" [ 2274.970533] env[62684]: } to be ready. {{(pid=62684) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2274.977804] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2274.977804] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5281a955-4543-0d51-9018-c001580efff7" [ 2274.977804] env[62684]: _type = "HttpNfcLease" [ 2274.977804] env[62684]: } is initializing. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2275.029751] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "refresh_cache-b262673b-e4d3-48d8-9f93-6c60d48ae29d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2275.029751] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquired lock "refresh_cache-b262673b-e4d3-48d8-9f93-6c60d48ae29d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2275.030067] env[62684]: DEBUG nova.network.neutron [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2275.251700] env[62684]: DEBUG oslo_vmware.api [None req-04eaabb6-c35f-4896-bc66-f57c735e6d50 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053701, 'name': ReconfigVM_Task, 'duration_secs': 0.141991} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2275.252048] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-04eaabb6-c35f-4896-bc66-f57c735e6d50 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421403', 'volume_id': 'bd2d9701-64ca-4c51-b6f2-382756dbec34', 'name': 'volume-bd2d9701-64ca-4c51-b6f2-382756dbec34', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '587edf89-2ea0-4b89-8830-fa766b798398', 'attached_at': '', 'detached_at': '', 'volume_id': 'bd2d9701-64ca-4c51-b6f2-382756dbec34', 'serial': 'bd2d9701-64ca-4c51-b6f2-382756dbec34'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2275.456380] env[62684]: DEBUG nova.compute.manager [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2275.479694] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2275.479694] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5281a955-4543-0d51-9018-c001580efff7" [ 2275.479694] env[62684]: _type = "HttpNfcLease" [ 2275.479694] env[62684]: } is ready. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2275.480079] env[62684]: DEBUG oslo_vmware.rw_handles [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2275.480079] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5281a955-4543-0d51-9018-c001580efff7" [ 2275.480079] env[62684]: _type = "HttpNfcLease" [ 2275.480079] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2275.480871] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d61f5b19-6bd0-4a73-912b-099a44c11025 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.488946] env[62684]: DEBUG oslo_vmware.rw_handles [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525c53f4-15e5-5cee-5133-8edeaaa03521/disk-0.vmdk from lease info. {{(pid=62684) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2275.490159] env[62684]: DEBUG oslo_vmware.rw_handles [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525c53f4-15e5-5cee-5133-8edeaaa03521/disk-0.vmdk for reading. 
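Above, the linked-clone VM is being exported for the image upload: an HttpNfcLease is requested via ExportVm, polled while it is "initializing", and once ready the VMDK URL from the lease info is opened for a streamed read. A rough sketch of the client side of that flow, assuming hypothetical create_export_lease/get_lease_info wrappers and using requests in place of oslo.vmware's own HTTP read handle:

import time
import requests  # stand-in for oslo_vmware.rw_handles' read connection

def export_vmdk(create_export_lease, get_lease_info, vm_ref, chunk_size=64 * 1024):
    """Export the first disk of vm_ref and yield its bytes.

    create_export_lease/get_lease_info are hypothetical wrappers around
    ExportVm and HttpNfcLease property reads.
    """
    lease = create_export_lease(vm_ref)              # ExportVm returns an HttpNfcLease
    while get_lease_info(lease).state == "initializing":
        time.sleep(0.5)                              # matches the "is initializing" poll above
    info = get_lease_info(lease)
    if info.state != "ready":
        raise RuntimeError(f"lease entered state {info.state}")
    vmdk_url = info.device_urls[0]                   # e.g. https://<esx-host>/nfc/<id>/disk-0.vmdk
    with requests.get(vmdk_url, stream=True, verify=False) as resp:
        resp.raise_for_status()
        for chunk in resp.iter_content(chunk_size):
            yield chunk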
{{(pid=62684) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2275.576453] env[62684]: DEBUG nova.network.neutron [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2275.580689] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b62032ec-cbf1-41c8-be3f-dcde412a49d6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.713161] env[62684]: DEBUG nova.network.neutron [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Updating instance_info_cache with network_info: [{"id": "38cf4164-d6e1-4c83-a587-36f2e19b13e3", "address": "fa:16:3e:62:32:52", "network": {"id": "b24dd0c0-a394-4ca6-a79a-94535bc1df6f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2023102141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "607a0aa1049640d882d7dd490f5f98ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38cf4164-d6", "ovs_interfaceid": "38cf4164-d6e1-4c83-a587-36f2e19b13e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2275.972397] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2275.979765] env[62684]: DEBUG oslo_concurrency.lockutils [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2275.979765] env[62684]: DEBUG oslo_concurrency.lockutils [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2275.981397] env[62684]: INFO 
nova.compute.claims [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2276.215802] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Releasing lock "refresh_cache-b262673b-e4d3-48d8-9f93-6c60d48ae29d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2276.216233] env[62684]: DEBUG nova.compute.manager [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Instance network_info: |[{"id": "38cf4164-d6e1-4c83-a587-36f2e19b13e3", "address": "fa:16:3e:62:32:52", "network": {"id": "b24dd0c0-a394-4ca6-a79a-94535bc1df6f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2023102141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "607a0aa1049640d882d7dd490f5f98ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38cf4164-d6", "ovs_interfaceid": "38cf4164-d6e1-4c83-a587-36f2e19b13e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2276.216719] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:62:32:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '38cf4164-d6e1-4c83-a587-36f2e19b13e3', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2276.226136] env[62684]: DEBUG oslo.service.loopingcall [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
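The "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" line comes from oslo.service's loopingcall module, which wraps the create-VM call so the CreateVM_Task can be polled until it finishes. A minimal sketch of that wait pattern using FixedIntervalLoopingCall; the exact helper nova wraps create_vm with may differ, and session.get_task_info here is an assumed accessor:

from oslo_service import loopingcall

def wait_with_looping_call(session, task_ref, interval=0.5):
    """Poll task_ref on a fixed interval until it completes."""
    def _poll():
        info = session.get_task_info(task_ref)       # hypothetical accessor
        if info.state == "success":
            raise loopingcall.LoopingCallDone(retvalue=info)
        if info.state == "error":
            raise RuntimeError(info.error)
        # Otherwise keep looping; the task is still queued or running.

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=interval).wait()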
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2276.226447] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2276.226947] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b23f777a-d9f4-427e-826b-420f5cd8bc46 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.249772] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2276.249772] env[62684]: value = "task-2053703" [ 2276.249772] env[62684]: _type = "Task" [ 2276.249772] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2276.258769] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053703, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2276.296461] env[62684]: DEBUG nova.objects.instance [None req-04eaabb6-c35f-4896-bc66-f57c735e6d50 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lazy-loading 'flavor' on Instance uuid 587edf89-2ea0-4b89-8830-fa766b798398 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2276.446103] env[62684]: DEBUG nova.compute.manager [req-e1f15644-0b45-430b-a021-f4d06d9eaf63 req-fab4e547-5525-445f-bcd4-95c3b29fafec service nova] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Received event network-changed-38cf4164-d6e1-4c83-a587-36f2e19b13e3 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2276.446524] env[62684]: DEBUG nova.compute.manager [req-e1f15644-0b45-430b-a021-f4d06d9eaf63 req-fab4e547-5525-445f-bcd4-95c3b29fafec service nova] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Refreshing instance network info cache due to event network-changed-38cf4164-d6e1-4c83-a587-36f2e19b13e3. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2276.446801] env[62684]: DEBUG oslo_concurrency.lockutils [req-e1f15644-0b45-430b-a021-f4d06d9eaf63 req-fab4e547-5525-445f-bcd4-95c3b29fafec service nova] Acquiring lock "refresh_cache-b262673b-e4d3-48d8-9f93-6c60d48ae29d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2276.447053] env[62684]: DEBUG oslo_concurrency.lockutils [req-e1f15644-0b45-430b-a021-f4d06d9eaf63 req-fab4e547-5525-445f-bcd4-95c3b29fafec service nova] Acquired lock "refresh_cache-b262673b-e4d3-48d8-9f93-6c60d48ae29d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2276.447619] env[62684]: DEBUG nova.network.neutron [req-e1f15644-0b45-430b-a021-f4d06d9eaf63 req-fab4e547-5525-445f-bcd4-95c3b29fafec service nova] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Refreshing network info cache for port 38cf4164-d6e1-4c83-a587-36f2e19b13e3 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2276.762518] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053703, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2276.801598] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04eaabb6-c35f-4896-bc66-f57c735e6d50 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "587edf89-2ea0-4b89-8830-fa766b798398" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.786s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2277.675270] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053703, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2277.785145] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-141ccff3-0cee-41ce-bb92-1894091d1677 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.793580] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-510c2772-9ef3-4e96-bde8-987dc812bc4c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.827464] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15d61eb8-3dc5-44b9-98b2-171d436f10a8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.838235] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5abdff37-bbe0-4f17-8a2c-87abee5a73ba {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.852821] env[62684]: DEBUG nova.compute.provider_tree [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2277.935314] env[62684]: DEBUG nova.network.neutron [req-e1f15644-0b45-430b-a021-f4d06d9eaf63 req-fab4e547-5525-445f-bcd4-95c3b29fafec service nova] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Updated VIF entry in instance network info cache for port 38cf4164-d6e1-4c83-a587-36f2e19b13e3. 
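The Acquiring lock / acquired / "released" ... held N.NNNs triplets throughout this trace come from oslo.concurrency's lockutils; both of its forms appear here (the "inner" frames are the synchronized decorator, the "lock" frames the context manager used for the per-instance refresh_cache locks). A minimal sketch of the two usages, with a made-up cache-refresh function standing in for the real compute-manager code:

from oslo_concurrency import lockutils

# Decorator form: serializes callers on a named lock, like the
# "compute_resources" lock held by instance_claim above.
@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid):
    print(f"claiming resources for {instance_uuid}")   # placeholder body

# Context-manager form: the per-instance "refresh_cache-<uuid>" locks.
def refresh_network_cache(instance_uuid, refresh_fn):
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        return refresh_fn(instance_uuid)               # refresh_fn is a hypothetical callable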
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2277.935714] env[62684]: DEBUG nova.network.neutron [req-e1f15644-0b45-430b-a021-f4d06d9eaf63 req-fab4e547-5525-445f-bcd4-95c3b29fafec service nova] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Updating instance_info_cache with network_info: [{"id": "38cf4164-d6e1-4c83-a587-36f2e19b13e3", "address": "fa:16:3e:62:32:52", "network": {"id": "b24dd0c0-a394-4ca6-a79a-94535bc1df6f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2023102141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "607a0aa1049640d882d7dd490f5f98ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38cf4164-d6", "ovs_interfaceid": "38cf4164-d6e1-4c83-a587-36f2e19b13e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2278.170507] env[62684]: INFO nova.compute.manager [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Rebuilding instance [ 2278.172494] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053703, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2278.218424] env[62684]: DEBUG nova.compute.manager [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2278.219336] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f392e03-ac71-4659-886e-e7be081ef9a8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2278.355760] env[62684]: DEBUG nova.scheduler.client.report [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2278.439634] env[62684]: DEBUG oslo_concurrency.lockutils [req-e1f15644-0b45-430b-a021-f4d06d9eaf63 req-fab4e547-5525-445f-bcd4-95c3b29fafec service nova] Releasing lock "refresh_cache-b262673b-e4d3-48d8-9f93-6c60d48ae29d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2278.670957] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053703, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2278.732549] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2278.732877] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c8933019-a8f8-4416-bf16-558bd2982ec3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2278.741666] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2278.741666] env[62684]: value = "task-2053704" [ 2278.741666] env[62684]: _type = "Task" [ 2278.741666] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2278.750882] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053704, 'name': PowerOffVM_Task} progress is 0%. 
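The inventory report above (VCPU, MEMORY_MB, DISK_GB with reserved, allocation_ratio and max_unit) is what the resource tracker pushes to placement. As a worked example of how those numbers translate into schedulable capacity, under placement's usual rule that capacity is (total - reserved) * allocation_ratio and no single allocation may exceed max_unit:

# Values copied from the inventory report above for provider c23c281e-....
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 155},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: schedulable capacity {capacity:.0f}, per-allocation cap {inv['max_unit']}")
# VCPU comes out at 192 (48 * 4.0), but a single instance can still request at most 16.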
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2278.862430] env[62684]: DEBUG oslo_concurrency.lockutils [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.882s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2278.863132] env[62684]: DEBUG nova.compute.manager [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2279.173257] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053703, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2279.252578] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053704, 'name': PowerOffVM_Task, 'duration_secs': 0.22384} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2279.252865] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2279.308051] env[62684]: INFO nova.compute.manager [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Detaching volume bd2d9701-64ca-4c51-b6f2-382756dbec34 [ 2279.341828] env[62684]: INFO nova.virt.block_device [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Attempting to driver detach volume bd2d9701-64ca-4c51-b6f2-382756dbec34 from mountpoint /dev/sdb [ 2279.342106] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Volume detach. 
Driver type: vmdk {{(pid=62684) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2279.342384] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421403', 'volume_id': 'bd2d9701-64ca-4c51-b6f2-382756dbec34', 'name': 'volume-bd2d9701-64ca-4c51-b6f2-382756dbec34', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '587edf89-2ea0-4b89-8830-fa766b798398', 'attached_at': '', 'detached_at': '', 'volume_id': 'bd2d9701-64ca-4c51-b6f2-382756dbec34', 'serial': 'bd2d9701-64ca-4c51-b6f2-382756dbec34'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2279.343269] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-748df594-e5a1-451d-82d0-b593572df92f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.365101] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-425c8ebe-34ac-4e59-8aad-95dfaf802e5f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.368777] env[62684]: DEBUG nova.compute.utils [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2279.370133] env[62684]: DEBUG nova.compute.manager [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2279.370300] env[62684]: DEBUG nova.network.neutron [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2279.377529] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc07824-174e-442a-8333-cac3b172bb6a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.400089] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeeeac42-70d1-46f4-b6fb-70b049aca8fd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.416177] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] The volume has not been displaced from its original location: [datastore1] volume-bd2d9701-64ca-4c51-b6f2-382756dbec34/volume-bd2d9701-64ca-4c51-b6f2-382756dbec34.vmdk. 
No consolidation needed. {{(pid=62684) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2279.421444] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Reconfiguring VM instance instance-00000067 to detach disk 2001 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2279.423394] env[62684]: DEBUG nova.policy [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '036892d8a801446eb335aaa3a25479f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0ee666ab952b42bd84461e4c3a76e6d9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2279.425033] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-64d72c5f-0cd6-42b6-a99b-f1d463d37864 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.447837] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2279.447837] env[62684]: value = "task-2053705" [ 2279.447837] env[62684]: _type = "Task" [ 2279.447837] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2279.457890] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053705, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2279.673222] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053703, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2279.777270] env[62684]: DEBUG nova.network.neutron [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Successfully created port: 3a0c1e2a-c83c-475e-889b-d951a26f7708 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2279.876024] env[62684]: DEBUG nova.compute.manager [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2279.959762] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053705, 'name': ReconfigVM_Task, 'duration_secs': 0.246439} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2279.960299] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Reconfigured VM instance instance-00000067 to detach disk 2001 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2279.966128] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-87338c01-55c4-4f7e-96a0-af3fa897c43d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.988374] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2279.988374] env[62684]: value = "task-2053706" [ 2279.988374] env[62684]: _type = "Task" [ 2279.988374] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2280.000767] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053706, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2280.174652] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053703, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2280.499352] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053706, 'name': ReconfigVM_Task, 'duration_secs': 0.219494} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2280.500515] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421403', 'volume_id': 'bd2d9701-64ca-4c51-b6f2-382756dbec34', 'name': 'volume-bd2d9701-64ca-4c51-b6f2-382756dbec34', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '587edf89-2ea0-4b89-8830-fa766b798398', 'attached_at': '', 'detached_at': '', 'volume_id': 'bd2d9701-64ca-4c51-b6f2-382756dbec34', 'serial': 'bd2d9701-64ca-4c51-b6f2-382756dbec34'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2280.679908] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053703, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2280.883777] env[62684]: DEBUG nova.compute.manager [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2280.910176] env[62684]: DEBUG nova.virt.hardware [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2280.910469] env[62684]: DEBUG nova.virt.hardware [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2280.910653] env[62684]: DEBUG nova.virt.hardware [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2280.910844] env[62684]: DEBUG nova.virt.hardware [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 
tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2280.911048] env[62684]: DEBUG nova.virt.hardware [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2280.911234] env[62684]: DEBUG nova.virt.hardware [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2280.911456] env[62684]: DEBUG nova.virt.hardware [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2280.911641] env[62684]: DEBUG nova.virt.hardware [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2280.911862] env[62684]: DEBUG nova.virt.hardware [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2280.912043] env[62684]: DEBUG nova.virt.hardware [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2280.912197] env[62684]: DEBUG nova.virt.hardware [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2280.913122] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f99b6c6-190b-49ba-8704-ec2c24496d00 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.922024] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2ea51f9-adc6-4970-8672-a952875587c0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.175878] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053703, 'name': CreateVM_Task} progress is 25%. 
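The hardware.py lines above walk the standard CPU-topology selection: flavor and image limits/preferences come back as 0 (unset), the maxima default to 65536, and for the 1-vCPU m1.nano flavor the only possible topology is 1 socket x 1 core x 1 thread. A small illustration of that enumeration step; this is a simplification of nova.virt.hardware, not the real code:

from collections import namedtuple

Topology = namedtuple("Topology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield every sockets*cores*threads factorization that equals vcpus."""
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                yield Topology(sockets, cores, threads)

print(list(possible_topologies(1)))  # [Topology(sockets=1, cores=1, threads=1)], as in the log
print(list(possible_topologies(4)))  # 1x1x4, 1x2x2, 1x4x1, 2x1x2, 2x2x1, 4x1x1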
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2281.180572] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "ba12fa9a-10e3-4624-98b5-4ff7365e1940" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2281.180849] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "ba12fa9a-10e3-4624-98b5-4ff7365e1940" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2281.181260] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "ba12fa9a-10e3-4624-98b5-4ff7365e1940-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2281.181513] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "ba12fa9a-10e3-4624-98b5-4ff7365e1940-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2281.181701] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "ba12fa9a-10e3-4624-98b5-4ff7365e1940-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2281.184152] env[62684]: INFO nova.compute.manager [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Terminating instance [ 2281.186163] env[62684]: DEBUG nova.compute.manager [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2281.186388] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2281.187250] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-901b2d8d-83d7-4602-a25e-879a77d734d9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.197606] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2281.197919] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3274436c-fed3-4f1e-aa7b-b528358c0471 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.205679] env[62684]: DEBUG oslo_vmware.api [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2281.205679] env[62684]: value = "task-2053707" [ 2281.205679] env[62684]: _type = "Task" [ 2281.205679] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2281.215135] env[62684]: DEBUG oslo_vmware.api [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053707, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2281.551881] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2281.552226] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9ad0369a-8786-46c7-9185-3027473238be {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.560239] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2281.560239] env[62684]: value = "task-2053708" [ 2281.560239] env[62684]: _type = "Task" [ 2281.560239] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2281.569662] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053708, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2281.676422] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053703, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2281.718016] env[62684]: DEBUG oslo_vmware.api [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053707, 'name': PowerOffVM_Task, 'duration_secs': 0.184531} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2281.718219] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2281.718393] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2281.718671] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d22af7cb-ba68-4c2f-8e6e-457fb5670bf9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.071173] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] VM already powered off {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2282.071550] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Volume detach. 
Driver type: vmdk {{(pid=62684) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2282.071759] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421403', 'volume_id': 'bd2d9701-64ca-4c51-b6f2-382756dbec34', 'name': 'volume-bd2d9701-64ca-4c51-b6f2-382756dbec34', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '587edf89-2ea0-4b89-8830-fa766b798398', 'attached_at': '', 'detached_at': '', 'volume_id': 'bd2d9701-64ca-4c51-b6f2-382756dbec34', 'serial': 'bd2d9701-64ca-4c51-b6f2-382756dbec34'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2282.072654] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baf2af59-86e7-4942-98a5-fb31f247eeea {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.093824] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f5402cb-876f-40bb-82ec-92aee409e9de {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.103017] env[62684]: WARNING nova.virt.vmwareapi.driver [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 2282.103403] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2282.104605] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcea1788-a389-4736-b9f9-33c19b9f4b93 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.114176] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2282.114430] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-02567330-c1f4-4ac8-954e-f99c46cbc78f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.178733] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053703, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2282.183808] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2282.184078] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2282.184393] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Deleting the datastore file [datastore1] ba12fa9a-10e3-4624-98b5-4ff7365e1940 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2282.184727] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-54291288-edd5-4825-9ad2-f01108bcd781 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.193697] env[62684]: DEBUG oslo_vmware.api [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2282.193697] env[62684]: value = "task-2053711" [ 2282.193697] env[62684]: _type = "Task" [ 2282.193697] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2282.204691] env[62684]: DEBUG oslo_vmware.api [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053711, 'name': DeleteDatastoreFile_Task} progress is 0%. 
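Both instances being torn down here follow the same driver sequence: power off the VM (skipped when it is already off, as with 587edf89-2ea0-4b89-8830-fa766b798398 above), unregister it from vCenter, then delete its directory from the datastore via a DeleteDatastoreFile_Task. Sketched as plain Python with hypothetical session helpers that each block until the underlying vCenter task completes:

class VMAlreadyPoweredOff(Exception):
    """Hypothetical marker exception raised by session.power_off_vm."""

def destroy_instance(session, vm_ref, datastore_path):
    """Power off, unregister and remove an instance's files."""
    try:
        session.power_off_vm(vm_ref)                   # PowerOffVM_Task
    except VMAlreadyPoweredOff:
        pass                                           # the "VM already powered off" branch above
    session.unregister_vm(vm_ref)                      # VirtualMachine.UnregisterVM
    session.delete_datastore_file(datastore_path)      # FileManager.DeleteDatastoreFile_Task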
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2282.313266] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2282.313527] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2282.313797] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Deleting the datastore file [datastore2] 587edf89-2ea0-4b89-8830-fa766b798398 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2282.314184] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f673bf2-856c-4ffc-ba95-8b57ab1170bb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.322929] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2282.322929] env[62684]: value = "task-2053712" [ 2282.322929] env[62684]: _type = "Task" [ 2282.322929] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2282.332239] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053712, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2282.676918] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053703, 'name': CreateVM_Task, 'duration_secs': 6.264886} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2282.677143] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2282.677910] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2282.678108] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2282.678482] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2282.678753] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bec536cf-ac07-4053-b2bb-383d1f35969c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.683818] env[62684]: DEBUG oslo_vmware.api [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2282.683818] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52148722-f21f-2c6e-aed5-161e159b6925" [ 2282.683818] env[62684]: _type = "Task" [ 2282.683818] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2282.692026] env[62684]: DEBUG oslo_vmware.api [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52148722-f21f-2c6e-aed5-161e159b6925, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2282.702021] env[62684]: DEBUG oslo_vmware.api [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053711, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.290741} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2282.702319] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2282.702506] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2282.702775] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2282.702894] env[62684]: INFO nova.compute.manager [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Took 1.52 seconds to destroy the instance on the hypervisor. [ 2282.703168] env[62684]: DEBUG oslo.service.loopingcall [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2282.703390] env[62684]: DEBUG nova.compute.manager [-] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2282.703484] env[62684]: DEBUG nova.network.neutron [-] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2282.833972] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053712, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.258866} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2282.834491] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2282.834709] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2282.834912] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2283.196183] env[62684]: DEBUG oslo_vmware.api [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52148722-f21f-2c6e-aed5-161e159b6925, 'name': SearchDatastore_Task, 'duration_secs': 0.012247} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2283.196591] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2283.196782] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2283.197059] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2283.197225] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2283.197411] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2283.197694] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4afdec4c-da2e-4b1c-bc13-7a02485bb7ce {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.209083] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2283.209316] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2283.210421] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-235ee5f2-aa1a-44e1-9147-904ccb6db4c1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.217776] env[62684]: DEBUG oslo_vmware.api [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2283.217776] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a04f10-5a3d-3b50-2867-a2dbbde1f50d" [ 2283.217776] env[62684]: _type = "Task" [ 2283.217776] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2283.229762] env[62684]: DEBUG oslo_vmware.api [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a04f10-5a3d-3b50-2867-a2dbbde1f50d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2283.340915] env[62684]: INFO nova.virt.block_device [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Booting with volume bd2d9701-64ca-4c51-b6f2-382756dbec34 at /dev/sdb [ 2283.380440] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bf7c61dd-f9c5-47ff-b736-b89bb5a7efa4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.393306] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4745706f-fbe6-444e-b8cd-43b6bb0cd820 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.429200] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fbac88d8-db16-430b-81fd-6e38e4ffbf1a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.432275] env[62684]: DEBUG nova.compute.manager [req-6b5e4233-8ef3-48ab-a428-547088ecf105 req-6a7e68d9-36a8-4e0a-adc0-b3bb88c83965 service nova] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Received event network-vif-deleted-0d17b2ea-8e17-456b-87e2-1e2bec93f187 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2283.432479] env[62684]: INFO nova.compute.manager [req-6b5e4233-8ef3-48ab-a428-547088ecf105 req-6a7e68d9-36a8-4e0a-adc0-b3bb88c83965 service nova] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Neutron deleted interface 0d17b2ea-8e17-456b-87e2-1e2bec93f187; detaching it from the instance and deleting it from the info cache [ 2283.432684] env[62684]: DEBUG nova.network.neutron [req-6b5e4233-8ef3-48ab-a428-547088ecf105 req-6a7e68d9-36a8-4e0a-adc0-b3bb88c83965 service nova] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2283.444490] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09f3f4c9-629b-48cf-9e12-a9aa6b664575 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.459433] env[62684]: DEBUG nova.compute.manager [req-a0d298d3-6874-46bf-a3c8-f44c113c29f2 req-7bd0c261-fc11-412e-acbd-ca2ee90419db service nova] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Received event network-vif-plugged-3a0c1e2a-c83c-475e-889b-d951a26f7708 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2283.459768] env[62684]: DEBUG oslo_concurrency.lockutils [req-a0d298d3-6874-46bf-a3c8-f44c113c29f2 req-7bd0c261-fc11-412e-acbd-ca2ee90419db service nova] Acquiring lock "a9965f71-e965-4144-a64a-6ee43ad20fc0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2283.460033] env[62684]: DEBUG oslo_concurrency.lockutils [req-a0d298d3-6874-46bf-a3c8-f44c113c29f2 req-7bd0c261-fc11-412e-acbd-ca2ee90419db service nova] Lock "a9965f71-e965-4144-a64a-6ee43ad20fc0-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2283.460249] env[62684]: DEBUG oslo_concurrency.lockutils [req-a0d298d3-6874-46bf-a3c8-f44c113c29f2 req-7bd0c261-fc11-412e-acbd-ca2ee90419db service nova] Lock "a9965f71-e965-4144-a64a-6ee43ad20fc0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2283.460429] env[62684]: DEBUG nova.compute.manager [req-a0d298d3-6874-46bf-a3c8-f44c113c29f2 req-7bd0c261-fc11-412e-acbd-ca2ee90419db service nova] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] No waiting events found dispatching network-vif-plugged-3a0c1e2a-c83c-475e-889b-d951a26f7708 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2283.460597] env[62684]: WARNING nova.compute.manager [req-a0d298d3-6874-46bf-a3c8-f44c113c29f2 req-7bd0c261-fc11-412e-acbd-ca2ee90419db service nova] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Received unexpected event network-vif-plugged-3a0c1e2a-c83c-475e-889b-d951a26f7708 for instance with vm_state building and task_state spawning. [ 2283.481559] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-902a909f-47ee-4aaa-b9e5-b5123ccedc28 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.488987] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b64d094e-c487-4711-8e3d-b920acb224bb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.504834] env[62684]: DEBUG nova.virt.block_device [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Updating existing volume attachment record: 07ad7007-20bd-4179-a901-31a383701e01 {{(pid=62684) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2283.733209] env[62684]: DEBUG oslo_vmware.api [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a04f10-5a3d-3b50-2867-a2dbbde1f50d, 'name': SearchDatastore_Task, 'duration_secs': 0.015039} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2283.734050] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac9ebc30-3321-4971-b1d6-1fd3275986e5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.740347] env[62684]: DEBUG oslo_vmware.api [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2283.740347] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f003d1-d427-8604-ace5-756adf2cba25" [ 2283.740347] env[62684]: _type = "Task" [ 2283.740347] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2283.749516] env[62684]: DEBUG oslo_vmware.api [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f003d1-d427-8604-ace5-756adf2cba25, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2283.907278] env[62684]: DEBUG nova.network.neutron [-] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2283.935269] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-63c68d7f-2a35-448d-9153-fa0dd40bc959 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.945808] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85b28207-0900-47be-a935-e5225d2114c9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.976066] env[62684]: DEBUG nova.compute.manager [req-6b5e4233-8ef3-48ab-a428-547088ecf105 req-6a7e68d9-36a8-4e0a-adc0-b3bb88c83965 service nova] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Detach interface failed, port_id=0d17b2ea-8e17-456b-87e2-1e2bec93f187, reason: Instance ba12fa9a-10e3-4624-98b5-4ff7365e1940 could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2284.042194] env[62684]: DEBUG nova.network.neutron [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Successfully updated port: 3a0c1e2a-c83c-475e-889b-d951a26f7708 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2284.252665] env[62684]: DEBUG oslo_vmware.api [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f003d1-d427-8604-ace5-756adf2cba25, 'name': SearchDatastore_Task, 'duration_secs': 0.037963} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2284.254756] env[62684]: DEBUG oslo_vmware.rw_handles [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525c53f4-15e5-5cee-5133-8edeaaa03521/disk-0.vmdk. 
{{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2284.255087] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2284.255545] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] b262673b-e4d3-48d8-9f93-6c60d48ae29d/b262673b-e4d3-48d8-9f93-6c60d48ae29d.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2284.256512] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4389af6f-0431-4a7b-83f2-2a92552e1ba1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.258936] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-29b3ac4e-f071-48b6-a32c-55c2fa15da82 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.264861] env[62684]: DEBUG oslo_vmware.rw_handles [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525c53f4-15e5-5cee-5133-8edeaaa03521/disk-0.vmdk is in state: ready. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2284.265095] env[62684]: ERROR oslo_vmware.rw_handles [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525c53f4-15e5-5cee-5133-8edeaaa03521/disk-0.vmdk due to incomplete transfer. [ 2284.265357] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-589fa02b-c373-4343-a33a-48b8a4549165 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.267845] env[62684]: DEBUG oslo_vmware.api [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2284.267845] env[62684]: value = "task-2053713" [ 2284.267845] env[62684]: _type = "Task" [ 2284.267845] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2284.273594] env[62684]: DEBUG oslo_vmware.rw_handles [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525c53f4-15e5-5cee-5133-8edeaaa03521/disk-0.vmdk. 
{{(pid=62684) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2284.273803] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Uploaded image a08ef668-cfc2-427d-9c76-1c1bbb64d819 to the Glance image server {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2284.276448] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Destroying the VM {{(pid=62684) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2284.279449] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-dbe8dd4b-c328-458f-8b25-cb947a48b537 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.280965] env[62684]: DEBUG oslo_vmware.api [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053713, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2284.287044] env[62684]: DEBUG oslo_vmware.api [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2284.287044] env[62684]: value = "task-2053714" [ 2284.287044] env[62684]: _type = "Task" [ 2284.287044] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2284.297197] env[62684]: DEBUG oslo_vmware.api [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053714, 'name': Destroy_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2284.410253] env[62684]: INFO nova.compute.manager [-] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Took 1.71 seconds to deallocate network for instance. 
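
The repeated "Waiting for the task: (returnval){...} to complete" and "Task: {...} progress is N%" records above are produced by oslo.vmware's task-polling helper. As a rough illustration only (the vCenter host, credentials and intervals below are placeholders, not values taken from this log), the calling pattern behind a long-running operation such as CopyVirtualDisk_Task looks roughly like this:

```python
# Illustrative sketch: driving a vCenter task through oslo.vmware, which
# emits the "Waiting for the task" / "progress is N%" DEBUG lines above.
from oslo_vmware import api as vmware_api


def copy_virtual_disk(session, source_path, dest_path, datacenter_ref):
    """Issue CopyVirtualDisk_Task and block until vCenter finishes it."""
    disk_mgr = session.vim.service_content.virtualDiskManager
    task_ref = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName=source_path, sourceDatacenter=datacenter_ref,
        destName=dest_path, destDatacenter=datacenter_ref)
    # wait_for_task() polls the TaskInfo every task_poll_interval seconds
    # and logs the progress percentage until the task succeeds or raises.
    return session.wait_for_task(task_ref)


if __name__ == '__main__':
    session = vmware_api.VMwareAPISession(
        'vc.example.test',              # hypothetical vCenter host
        'administrator@vsphere.local',  # hypothetical username
        'secret',                       # hypothetical password
        10,                             # api_retry_count
        0.5)                            # task_poll_interval, in seconds
```
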
[ 2284.546053] env[62684]: DEBUG oslo_concurrency.lockutils [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Acquiring lock "refresh_cache-a9965f71-e965-4144-a64a-6ee43ad20fc0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2284.546290] env[62684]: DEBUG oslo_concurrency.lockutils [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Acquired lock "refresh_cache-a9965f71-e965-4144-a64a-6ee43ad20fc0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2284.546521] env[62684]: DEBUG nova.network.neutron [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2284.778533] env[62684]: DEBUG oslo_vmware.api [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053713, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2284.796036] env[62684]: DEBUG oslo_vmware.api [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053714, 'name': Destroy_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2284.917161] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2284.917470] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2284.917702] env[62684]: DEBUG nova.objects.instance [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lazy-loading 'resources' on Instance uuid ba12fa9a-10e3-4624-98b5-4ff7365e1940 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2285.081733] env[62684]: DEBUG nova.network.neutron [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2285.250719] env[62684]: DEBUG nova.network.neutron [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Updating instance_info_cache with network_info: [{"id": "3a0c1e2a-c83c-475e-889b-d951a26f7708", "address": "fa:16:3e:8c:6e:10", "network": {"id": "02c48e78-6836-43a6-a018-9452acb08ac9", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1506604142-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ee666ab952b42bd84461e4c3a76e6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a0c1e2a-c8", "ovs_interfaceid": "3a0c1e2a-c83c-475e-889b-d951a26f7708", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2285.280081] env[62684]: DEBUG oslo_vmware.api [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053713, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.523672} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2285.280378] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] b262673b-e4d3-48d8-9f93-6c60d48ae29d/b262673b-e4d3-48d8-9f93-6c60d48ae29d.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2285.280446] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2285.280682] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d36d14b0-0b7b-4537-b108-147c9c683466 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.287158] env[62684]: DEBUG oslo_vmware.api [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2285.287158] env[62684]: value = "task-2053715" [ 2285.287158] env[62684]: _type = "Task" [ 2285.287158] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2285.298939] env[62684]: DEBUG oslo_vmware.api [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053715, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2285.301893] env[62684]: DEBUG oslo_vmware.api [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053714, 'name': Destroy_Task, 'duration_secs': 0.523213} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2285.302147] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Destroyed the VM [ 2285.302417] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Deleting Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2285.302679] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f523bb6a-5b45-4e9b-ad9e-2d8f2d6db00e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.309570] env[62684]: DEBUG oslo_vmware.api [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2285.309570] env[62684]: value = "task-2053716" [ 2285.309570] env[62684]: _type = "Task" [ 2285.309570] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2285.318152] env[62684]: DEBUG oslo_vmware.api [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053716, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2285.451409] env[62684]: DEBUG nova.compute.manager [req-9f21a439-611d-49d1-970f-ab955d33304e req-9e2a5d5c-5146-45d4-9bef-477b1c9fee0f service nova] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Received event network-changed-3a0c1e2a-c83c-475e-889b-d951a26f7708 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2285.451492] env[62684]: DEBUG nova.compute.manager [req-9f21a439-611d-49d1-970f-ab955d33304e req-9e2a5d5c-5146-45d4-9bef-477b1c9fee0f service nova] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Refreshing instance network info cache due to event network-changed-3a0c1e2a-c83c-475e-889b-d951a26f7708. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2285.451710] env[62684]: DEBUG oslo_concurrency.lockutils [req-9f21a439-611d-49d1-970f-ab955d33304e req-9e2a5d5c-5146-45d4-9bef-477b1c9fee0f service nova] Acquiring lock "refresh_cache-a9965f71-e965-4144-a64a-6ee43ad20fc0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2285.534879] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13b34215-8af0-421f-b588-88ea672a436f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.543256] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a3cbc84-2781-4920-b3ee-d7839b415f0a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.573624] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fda13d9-1088-4484-8f47-87b27d16cc68 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.582267] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c280c569-250c-4248-8c3c-4a40927097a6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.598532] env[62684]: DEBUG nova.compute.provider_tree [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2285.621614] env[62684]: DEBUG nova.virt.hardware [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2285.621880] env[62684]: DEBUG nova.virt.hardware [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2285.622040] env[62684]: DEBUG nova.virt.hardware [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 2285.622225] env[62684]: DEBUG nova.virt.hardware [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2285.622445] env[62684]: DEBUG nova.virt.hardware [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2285.622661] env[62684]: DEBUG nova.virt.hardware [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2285.622830] env[62684]: DEBUG nova.virt.hardware [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2285.622996] env[62684]: DEBUG nova.virt.hardware [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2285.623183] env[62684]: DEBUG nova.virt.hardware [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2285.623352] env[62684]: DEBUG nova.virt.hardware [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2285.623528] env[62684]: DEBUG nova.virt.hardware [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2285.624617] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0154adf2-a1bf-4f98-a36d-719326415090 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.633465] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae71fc56-d64b-4003-af69-47e212e9d1c6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.648900] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 
587edf89-2ea0-4b89-8830-fa766b798398] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:06:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '171aeae0-6a27-44fc-bc3d-a2d5581fc702', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '91d5100c-0d94-42a3-a4f2-5055bd108b50', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2285.656453] env[62684]: DEBUG oslo.service.loopingcall [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2285.656819] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2285.657136] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-46477c27-8e1c-44ea-8ee8-a91b568efe10 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.675601] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2285.675601] env[62684]: value = "task-2053717" [ 2285.675601] env[62684]: _type = "Task" [ 2285.675601] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2285.684610] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053717, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2285.753143] env[62684]: DEBUG oslo_concurrency.lockutils [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Releasing lock "refresh_cache-a9965f71-e965-4144-a64a-6ee43ad20fc0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2285.753583] env[62684]: DEBUG nova.compute.manager [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Instance network_info: |[{"id": "3a0c1e2a-c83c-475e-889b-d951a26f7708", "address": "fa:16:3e:8c:6e:10", "network": {"id": "02c48e78-6836-43a6-a018-9452acb08ac9", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1506604142-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ee666ab952b42bd84461e4c3a76e6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a0c1e2a-c8", "ovs_interfaceid": 
"3a0c1e2a-c83c-475e-889b-d951a26f7708", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2285.754160] env[62684]: DEBUG oslo_concurrency.lockutils [req-9f21a439-611d-49d1-970f-ab955d33304e req-9e2a5d5c-5146-45d4-9bef-477b1c9fee0f service nova] Acquired lock "refresh_cache-a9965f71-e965-4144-a64a-6ee43ad20fc0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2285.754434] env[62684]: DEBUG nova.network.neutron [req-9f21a439-611d-49d1-970f-ab955d33304e req-9e2a5d5c-5146-45d4-9bef-477b1c9fee0f service nova] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Refreshing network info cache for port 3a0c1e2a-c83c-475e-889b-d951a26f7708 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2285.755837] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:6e:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'de5fcb06-b0d0-467f-86fe-06882165ac31', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3a0c1e2a-c83c-475e-889b-d951a26f7708', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2285.763553] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Creating folder: Project (0ee666ab952b42bd84461e4c3a76e6d9). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2285.764610] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ef09fcca-c303-4c49-b9b4-e46f83949101 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.777497] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Created folder: Project (0ee666ab952b42bd84461e4c3a76e6d9) in parent group-v421118. [ 2285.777751] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Creating folder: Instances. Parent ref: group-v421408. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2285.778040] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4312f9c9-4696-49f0-8083-49d74f4c3db9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.792108] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Created folder: Instances in parent group-v421408. 
[ 2285.792297] env[62684]: DEBUG oslo.service.loopingcall [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2285.792870] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2285.793119] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dcd2740f-4705-4dfb-84a9-e2304c627354 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.809698] env[62684]: DEBUG oslo_vmware.api [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053715, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067293} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2285.810382] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2285.813657] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e1e4a31-369d-483c-b304-69a75777daf5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.818734] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2285.818734] env[62684]: value = "task-2053720" [ 2285.818734] env[62684]: _type = "Task" [ 2285.818734] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2285.833570] env[62684]: DEBUG oslo_vmware.api [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053716, 'name': RemoveSnapshot_Task, 'duration_secs': 0.374535} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2285.842507] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] b262673b-e4d3-48d8-9f93-6c60d48ae29d/b262673b-e4d3-48d8-9f93-6c60d48ae29d.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2285.843325] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Deleted Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2285.843640] env[62684]: DEBUG nova.compute.manager [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2285.844231] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7605f038-41d4-4198-87f6-f59bfb8f4410 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.864878] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-870b29df-bcfb-4cc7-a6a2-816952a9d107 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.867066] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053720, 'name': CreateVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2285.875774] env[62684]: DEBUG oslo_vmware.api [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2285.875774] env[62684]: value = "task-2053721" [ 2285.875774] env[62684]: _type = "Task" [ 2285.875774] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2285.885068] env[62684]: DEBUG oslo_vmware.api [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053721, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2286.103093] env[62684]: DEBUG nova.scheduler.client.report [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2286.186282] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053717, 'name': CreateVM_Task, 'duration_secs': 0.359699} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2286.186466] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2286.187120] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2286.187294] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2286.187629] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2286.187885] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a12e674-0bb5-4232-bda8-f107538cc5d6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.193191] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2286.193191] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5221fafd-ba53-66de-bbe8-64404881b326" [ 2286.193191] env[62684]: _type = "Task" [ 2286.193191] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2286.202394] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5221fafd-ba53-66de-bbe8-64404881b326, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2286.329462] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053720, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2286.385157] env[62684]: INFO nova.compute.manager [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Shelve offloading [ 2286.389348] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2286.390389] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-56fad8e7-e049-4908-bf54-e0be8d1045aa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.395605] env[62684]: DEBUG oslo_vmware.api [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053721, 'name': ReconfigVM_Task, 'duration_secs': 0.296891} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2286.396191] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Reconfigured VM instance instance-0000006d to attach disk [datastore1] b262673b-e4d3-48d8-9f93-6c60d48ae29d/b262673b-e4d3-48d8-9f93-6c60d48ae29d.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2286.396782] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-01898695-e945-43d1-894a-a795b1ce7e6f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.400880] env[62684]: DEBUG oslo_vmware.api [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2286.400880] env[62684]: value = "task-2053722" [ 2286.400880] env[62684]: _type = "Task" [ 2286.400880] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2286.404824] env[62684]: DEBUG oslo_vmware.api [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2286.404824] env[62684]: value = "task-2053723" [ 2286.404824] env[62684]: _type = "Task" [ 2286.404824] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2286.410813] env[62684]: DEBUG oslo_vmware.api [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053722, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2286.419950] env[62684]: DEBUG oslo_vmware.api [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053723, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2286.593726] env[62684]: DEBUG nova.network.neutron [req-9f21a439-611d-49d1-970f-ab955d33304e req-9e2a5d5c-5146-45d4-9bef-477b1c9fee0f service nova] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Updated VIF entry in instance network info cache for port 3a0c1e2a-c83c-475e-889b-d951a26f7708. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2286.594135] env[62684]: DEBUG nova.network.neutron [req-9f21a439-611d-49d1-970f-ab955d33304e req-9e2a5d5c-5146-45d4-9bef-477b1c9fee0f service nova] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Updating instance_info_cache with network_info: [{"id": "3a0c1e2a-c83c-475e-889b-d951a26f7708", "address": "fa:16:3e:8c:6e:10", "network": {"id": "02c48e78-6836-43a6-a018-9452acb08ac9", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1506604142-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ee666ab952b42bd84461e4c3a76e6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a0c1e2a-c8", "ovs_interfaceid": "3a0c1e2a-c83c-475e-889b-d951a26f7708", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2286.608561] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.691s {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2286.628181] env[62684]: INFO nova.scheduler.client.report [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Deleted allocations for instance ba12fa9a-10e3-4624-98b5-4ff7365e1940 [ 2286.704715] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5221fafd-ba53-66de-bbe8-64404881b326, 'name': SearchDatastore_Task, 'duration_secs': 0.010525} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2286.705012] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2286.705248] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2286.705484] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2286.705632] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2286.705809] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2286.706079] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2ba6dbce-4857-4d89-9a2d-2f98844a611f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.715430] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2286.715617] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None 
req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2286.716334] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9918112-7d3a-4338-b942-853edf70ff39 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.721419] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2286.721419] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f81d55-9b55-1d6b-c6e4-7658fa048bb1" [ 2286.721419] env[62684]: _type = "Task" [ 2286.721419] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2286.729250] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f81d55-9b55-1d6b-c6e4-7658fa048bb1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2286.831390] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053720, 'name': CreateVM_Task, 'duration_secs': 0.544296} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2286.831574] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2286.832283] env[62684]: DEBUG oslo_concurrency.lockutils [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2286.832457] env[62684]: DEBUG oslo_concurrency.lockutils [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2286.832787] env[62684]: DEBUG oslo_concurrency.lockutils [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2286.833071] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0bddf04-b265-4d2b-a748-b0efe167d5cb {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.837775] env[62684]: DEBUG oslo_vmware.api [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Waiting for the task: (returnval){ [ 2286.837775] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ba9f9a-6f4c-5aa5-0ed0-097322e53340" [ 2286.837775] env[62684]: _type = "Task" [ 2286.837775] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2286.845776] env[62684]: DEBUG oslo_vmware.api [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ba9f9a-6f4c-5aa5-0ed0-097322e53340, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2286.912364] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] VM already powered off {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2286.912657] env[62684]: DEBUG nova.compute.manager [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2286.913561] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dad9c14-a92d-45c5-872a-7f216d90c8d4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.918951] env[62684]: DEBUG oslo_vmware.api [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053723, 'name': Rename_Task, 'duration_secs': 0.143298} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2286.919540] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2286.919777] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-169803ea-12c7-4033-82bf-1dd77e951b55 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.922583] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquiring lock "refresh_cache-9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2286.922753] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquired lock "refresh_cache-9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2286.922925] env[62684]: DEBUG nova.network.neutron [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2286.930046] env[62684]: DEBUG oslo_vmware.api [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2286.930046] env[62684]: value = "task-2053724" [ 2286.930046] env[62684]: _type = "Task" [ 2286.930046] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2286.938787] env[62684]: DEBUG oslo_vmware.api [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053724, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2287.096910] env[62684]: DEBUG oslo_concurrency.lockutils [req-9f21a439-611d-49d1-970f-ab955d33304e req-9e2a5d5c-5146-45d4-9bef-477b1c9fee0f service nova] Releasing lock "refresh_cache-a9965f71-e965-4144-a64a-6ee43ad20fc0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2287.137570] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f0409ccc-e996-47c9-80e3-b9de699d2f1d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "ba12fa9a-10e3-4624-98b5-4ff7365e1940" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.956s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2287.232514] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f81d55-9b55-1d6b-c6e4-7658fa048bb1, 'name': SearchDatastore_Task, 'duration_secs': 0.008625} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2287.233347] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae565d00-5790-4a8d-84d6-7a0b65f0ea9c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.238712] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2287.238712] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521a70f2-ac9f-7c14-188b-ddefa8c1526c" [ 2287.238712] env[62684]: _type = "Task" [ 2287.238712] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2287.246408] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521a70f2-ac9f-7c14-188b-ddefa8c1526c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2287.348382] env[62684]: DEBUG oslo_vmware.api [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ba9f9a-6f4c-5aa5-0ed0-097322e53340, 'name': SearchDatastore_Task, 'duration_secs': 0.009705} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2287.348722] env[62684]: DEBUG oslo_concurrency.lockutils [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2287.348903] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2287.349153] env[62684]: DEBUG oslo_concurrency.lockutils [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2287.349307] env[62684]: DEBUG oslo_concurrency.lockutils [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2287.349490] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2287.349864] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7beb81cb-0f0a-42b6-8a31-dd0522f1177b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.358799] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2287.358988] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2287.359745] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e8020f8-d35e-43b8-a4ef-0351db2629b4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.365430] env[62684]: DEBUG oslo_vmware.api [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Waiting for the task: (returnval){ [ 2287.365430] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5205fe8a-739b-7a69-8544-de853b98a15b" [ 2287.365430] env[62684]: _type = "Task" [ 2287.365430] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2287.373581] env[62684]: DEBUG oslo_vmware.api [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5205fe8a-739b-7a69-8544-de853b98a15b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2287.440588] env[62684]: DEBUG oslo_vmware.api [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053724, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2287.730928] env[62684]: DEBUG nova.network.neutron [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Updating instance_info_cache with network_info: [{"id": "61adf5cc-1692-4079-b909-b15313ce9680", "address": "fa:16:3e:17:9f:65", "network": {"id": "4142ba34-c2e0-4a22-a8dd-be06ba98c6e5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1627792019-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0e0f0e1dc834134913bd742fa99b52f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61adf5cc-16", "ovs_interfaceid": "61adf5cc-1692-4079-b909-b15313ce9680", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2287.751365] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': 
session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521a70f2-ac9f-7c14-188b-ddefa8c1526c, 'name': SearchDatastore_Task, 'duration_secs': 0.009686} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2287.751635] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2287.751890] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 587edf89-2ea0-4b89-8830-fa766b798398/587edf89-2ea0-4b89-8830-fa766b798398.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2287.752167] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d89053d2-d7c7-4445-8bda-d0b184acd2c2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.762965] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2287.762965] env[62684]: value = "task-2053725" [ 2287.762965] env[62684]: _type = "Task" [ 2287.762965] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2287.772567] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053725, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2287.876662] env[62684]: DEBUG oslo_vmware.api [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5205fe8a-739b-7a69-8544-de853b98a15b, 'name': SearchDatastore_Task, 'duration_secs': 0.008453} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2287.877475] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8cab5e1a-3c45-43ea-814c-7b2ddd11bb12 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.883434] env[62684]: DEBUG oslo_vmware.api [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Waiting for the task: (returnval){ [ 2287.883434] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52360cc4-eeb7-d772-df78-8e45ff1b6c65" [ 2287.883434] env[62684]: _type = "Task" [ 2287.883434] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2287.892446] env[62684]: DEBUG oslo_vmware.api [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52360cc4-eeb7-d772-df78-8e45ff1b6c65, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2287.945481] env[62684]: DEBUG oslo_vmware.api [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053724, 'name': PowerOnVM_Task, 'duration_secs': 0.649968} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2287.945481] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2287.945481] env[62684]: INFO nova.compute.manager [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Took 14.67 seconds to spawn the instance on the hypervisor. 
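The CreateVM/ExtendVirtualDisk/CopyVirtualDisk/PowerOnVM records above all follow the same shape: the vCenter call returns a task handle, wait_for_task (oslo_vmware/api.py:397) blocks on it, and _poll_task reports "progress is N%" until the task "completed successfully". The snippet below is a minimal, self-contained sketch of that polling pattern only; the VcTask class, its poll() method and the interval are illustrative stand-ins, not the oslo.vmware implementation these paths point at.

    # Illustrative sketch of the wait_for_task/_poll_task pattern seen in the
    # DEBUG records above. VcTask is a hypothetical stand-in for a vCenter
    # task handle; the real driver goes through oslo.vmware's session.
    import time

    class TaskFailed(Exception):
        pass

    class VcTask:
        """Hypothetical task handle exposing vCenter-style task info."""
        def __init__(self, task_id, name):
            self.task_id = task_id      # e.g. "task-2053724"
            self.name = name            # e.g. "PowerOnVM_Task"
            self._progress = 0

        def poll(self):
            """Return (state, progress); here we simply simulate progress."""
            self._progress = min(100, self._progress + 33)
            state = 'success' if self._progress >= 100 else 'running'
            return state, self._progress

    def wait_for_task(task, interval=0.5):
        """Poll a task until it reaches a terminal state, logging progress."""
        while True:
            state, progress = task.poll()
            print("Task: {'id': %s, 'name': %s} progress is %d%%"
                  % (task.task_id, task.name, progress))
            if state == 'success':
                print("Task %s completed successfully." % task.task_id)
                return
            if state == 'error':
                raise TaskFailed(task.task_id)
            time.sleep(interval)

    if __name__ == '__main__':
        wait_for_task(VcTask('task-2053724', 'PowerOnVM_Task'))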
[ 2287.945481] env[62684]: DEBUG nova.compute.manager [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2287.946283] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b667471-2d80-48bc-ab09-f891717b3644 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.234170] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Releasing lock "refresh_cache-9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2288.273524] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053725, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.443112} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2288.274112] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 587edf89-2ea0-4b89-8830-fa766b798398/587edf89-2ea0-4b89-8830-fa766b798398.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2288.274112] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2288.274319] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b5be8383-ccaf-4735-9c95-39d14d4866a5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.282302] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2288.282302] env[62684]: value = "task-2053726" [ 2288.282302] env[62684]: _type = "Task" [ 2288.282302] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2288.290501] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053726, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2288.394543] env[62684]: DEBUG oslo_vmware.api [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52360cc4-eeb7-d772-df78-8e45ff1b6c65, 'name': SearchDatastore_Task, 'duration_secs': 0.02166} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2288.394961] env[62684]: DEBUG oslo_concurrency.lockutils [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2288.395278] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] a9965f71-e965-4144-a64a-6ee43ad20fc0/a9965f71-e965-4144-a64a-6ee43ad20fc0.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2288.395552] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9876970e-204d-44a7-8e12-7ee9c609a5d4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.403178] env[62684]: DEBUG oslo_vmware.api [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Waiting for the task: (returnval){ [ 2288.403178] env[62684]: value = "task-2053727" [ 2288.403178] env[62684]: _type = "Task" [ 2288.403178] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2288.411261] env[62684]: DEBUG oslo_vmware.api [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Task: {'id': task-2053727, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2288.471806] env[62684]: DEBUG nova.compute.manager [req-6fbbf91b-d5b2-4425-8a64-0be104a9082a req-c081bf9d-4bd4-4d9a-9522-3a903c7be47f service nova] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Received event network-vif-unplugged-61adf5cc-1692-4079-b909-b15313ce9680 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2288.472070] env[62684]: DEBUG oslo_concurrency.lockutils [req-6fbbf91b-d5b2-4425-8a64-0be104a9082a req-c081bf9d-4bd4-4d9a-9522-3a903c7be47f service nova] Acquiring lock "9f1e9ae9-c082-4fbe-bd21-6e14e40962c1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2288.472317] env[62684]: DEBUG oslo_concurrency.lockutils [req-6fbbf91b-d5b2-4425-8a64-0be104a9082a req-c081bf9d-4bd4-4d9a-9522-3a903c7be47f service nova] Lock "9f1e9ae9-c082-4fbe-bd21-6e14e40962c1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2288.472490] env[62684]: DEBUG oslo_concurrency.lockutils [req-6fbbf91b-d5b2-4425-8a64-0be104a9082a req-c081bf9d-4bd4-4d9a-9522-3a903c7be47f service nova] Lock "9f1e9ae9-c082-4fbe-bd21-6e14e40962c1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2288.472666] env[62684]: DEBUG nova.compute.manager [req-6fbbf91b-d5b2-4425-8a64-0be104a9082a req-c081bf9d-4bd4-4d9a-9522-3a903c7be47f service nova] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] No waiting events found dispatching network-vif-unplugged-61adf5cc-1692-4079-b909-b15313ce9680 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2288.472842] env[62684]: WARNING nova.compute.manager [req-6fbbf91b-d5b2-4425-8a64-0be104a9082a req-c081bf9d-4bd4-4d9a-9522-3a903c7be47f service nova] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Received unexpected event network-vif-unplugged-61adf5cc-1692-4079-b909-b15313ce9680 for instance with vm_state shelved and task_state shelving_offloading. [ 2288.473383] env[62684]: INFO nova.compute.manager [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Took 19.46 seconds to build instance. 
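The "Acquiring lock" / "acquired ... waited" / "'released' ... held" records throughout this section come from oslo.concurrency's lockutils, which is what times how long locks such as "compute_resources" or the per-image cache lock were waited for and held. Below is a rough sketch of the two usage patterns those records reflect, assuming only that oslo.concurrency is installed; the function bodies are placeholders, not Nova's actual resource-tracker or image-cache code.

    # Rough sketch of the oslo.concurrency usage behind the
    # "Acquiring lock" / "released ... held N.NNNs" records in this section.
    # Lock names are taken from the log; the bodies are placeholders.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Work done while holding the named lock; lockutils' wrapper logs how
        # long the caller waited for the lock and how long it was held (the
        # "waited 0.000s" / "held 1.691s" lines above).
        pass

    def process_cached_image(image_id):
        # The image-cache records use a named lock around the cache path,
        # e.g. "[datastore1] devstack-image-cache_base/<image_id>".
        with lockutils.lock('[datastore1] devstack-image-cache_base/%s' % image_id):
            pass  # fetch or reuse the cached VMDK while the lock is held

    update_usage()
    process_cached_image('3931321c-cb4c-4b87-8d3a-50e05ea01db2')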
[ 2288.494684] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2288.495589] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f44e485-4b36-4f6e-859c-164681d00894 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.504143] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2288.504397] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a3dd4e97-dab0-4a3f-83c5-fae4fb9ce7e7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.665105] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2288.665429] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2288.665627] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Deleting the datastore file [datastore2] 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2288.665929] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6514bb41-41ce-46f9-9687-4615a6272efc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.674962] env[62684]: DEBUG oslo_vmware.api [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2288.674962] env[62684]: value = "task-2053729" [ 2288.674962] env[62684]: _type = "Task" [ 2288.674962] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2288.686418] env[62684]: DEBUG oslo_vmware.api [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053729, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2288.793572] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053726, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066315} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2288.793867] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2288.794774] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-752e2af1-173e-4a9a-a006-7bf853ba4c8d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.818215] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 587edf89-2ea0-4b89-8830-fa766b798398/587edf89-2ea0-4b89-8830-fa766b798398.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2288.818568] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-377c40d4-1771-462b-8f56-c9d527686826 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.838953] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2288.838953] env[62684]: value = "task-2053730" [ 2288.838953] env[62684]: _type = "Task" [ 2288.838953] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2288.847537] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053730, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2288.914036] env[62684]: DEBUG oslo_vmware.api [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Task: {'id': task-2053727, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.43728} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2288.914314] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] a9965f71-e965-4144-a64a-6ee43ad20fc0/a9965f71-e965-4144-a64a-6ee43ad20fc0.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2288.914565] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2288.914837] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2384b5fc-1c68-4a5e-93ab-3b33625cdaab {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.922711] env[62684]: DEBUG oslo_vmware.api [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Waiting for the task: (returnval){ [ 2288.922711] env[62684]: value = "task-2053731" [ 2288.922711] env[62684]: _type = "Task" [ 2288.922711] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2288.931312] env[62684]: DEBUG oslo_vmware.api [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Task: {'id': task-2053731, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2288.978179] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71444456-a050-40fd-a3a4-164baec816d3 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "b262673b-e4d3-48d8-9f93-6c60d48ae29d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.976s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2289.128230] env[62684]: DEBUG nova.compute.manager [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Stashing vm_state: active {{(pid=62684) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 2289.186207] env[62684]: DEBUG oslo_vmware.api [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053729, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.2455} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2289.186387] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2289.186581] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2289.186758] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2289.207235] env[62684]: INFO nova.scheduler.client.report [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Deleted allocations for instance 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1 [ 2289.348790] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053730, 'name': ReconfigVM_Task, 'duration_secs': 0.310816} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2289.349081] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 587edf89-2ea0-4b89-8830-fa766b798398/587edf89-2ea0-4b89-8830-fa766b798398.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2289.350264] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'disk_bus': None, 'boot_index': 0, 'device_name': '/dev/sda', 'device_type': 'disk', 'encryption_secret_uuid': None, 'encryption_options': None, 'encrypted': False, 'guest_format': None, 'size': 0, 'encryption_format': None, 'image_id': '3931321c-cb4c-4b87-8d3a-50e05ea01db2'}], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': False, 'disk_bus': None, 'boot_index': None, 'device_type': None, 'attachment_id': '07ad7007-20bd-4179-a901-31a383701e01', 'guest_format': None, 'mount_device': '/dev/sdb', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421403', 'volume_id': 'bd2d9701-64ca-4c51-b6f2-382756dbec34', 'name': 'volume-bd2d9701-64ca-4c51-b6f2-382756dbec34', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 
'instance': '587edf89-2ea0-4b89-8830-fa766b798398', 'attached_at': '', 'detached_at': '', 'volume_id': 'bd2d9701-64ca-4c51-b6f2-382756dbec34', 'serial': 'bd2d9701-64ca-4c51-b6f2-382756dbec34'}, 'volume_type': None}], 'swap': None} {{(pid=62684) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 2289.350471] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Volume attach. Driver type: vmdk {{(pid=62684) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2289.350669] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421403', 'volume_id': 'bd2d9701-64ca-4c51-b6f2-382756dbec34', 'name': 'volume-bd2d9701-64ca-4c51-b6f2-382756dbec34', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '587edf89-2ea0-4b89-8830-fa766b798398', 'attached_at': '', 'detached_at': '', 'volume_id': 'bd2d9701-64ca-4c51-b6f2-382756dbec34', 'serial': 'bd2d9701-64ca-4c51-b6f2-382756dbec34'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2289.351509] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db3ec244-9f30-46e9-9d9f-66df8a598ef9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.368874] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a147740-708e-4a12-8065-6955583909b7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.393211] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] volume-bd2d9701-64ca-4c51-b6f2-382756dbec34/volume-bd2d9701-64ca-4c51-b6f2-382756dbec34.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2289.393495] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc36c6f8-5c91-4db4-b351-84c809061f64 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.412777] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2289.412777] env[62684]: value = "task-2053732" [ 2289.412777] env[62684]: _type = "Task" [ 2289.412777] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2289.421229] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053732, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2289.431782] env[62684]: DEBUG oslo_vmware.api [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Task: {'id': task-2053731, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074167} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2289.432162] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2289.432981] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64d716ae-3db9-491f-a7dd-84ec3f9b468f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.457432] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] a9965f71-e965-4144-a64a-6ee43ad20fc0/a9965f71-e965-4144-a64a-6ee43ad20fc0.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2289.457755] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be3be488-8281-4398-951d-879964801692 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.477090] env[62684]: DEBUG oslo_vmware.api [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Waiting for the task: (returnval){ [ 2289.477090] env[62684]: value = "task-2053733" [ 2289.477090] env[62684]: _type = "Task" [ 2289.477090] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2289.485242] env[62684]: DEBUG oslo_vmware.api [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Task: {'id': task-2053733, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2289.528459] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e1df76-65f5-4ba9-8bc6-c47783951389 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.535631] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8815cba3-93ca-4953-8623-75a7dc2da31d tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Suspending the VM {{(pid=62684) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 2289.535935] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-a7fd7f1b-4264-4ea1-9ae2-2ea48edf9ea6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.542431] env[62684]: DEBUG oslo_vmware.api [None req-8815cba3-93ca-4953-8623-75a7dc2da31d tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2289.542431] env[62684]: value = "task-2053734" [ 2289.542431] env[62684]: _type = "Task" [ 2289.542431] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2289.550562] env[62684]: DEBUG oslo_vmware.api [None req-8815cba3-93ca-4953-8623-75a7dc2da31d tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053734, 'name': SuspendVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2289.648901] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2289.649207] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2289.712445] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2289.924342] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053732, 'name': ReconfigVM_Task, 'duration_secs': 0.309633} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2289.924710] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Reconfigured VM instance instance-00000067 to attach disk [datastore1] volume-bd2d9701-64ca-4c51-b6f2-382756dbec34/volume-bd2d9701-64ca-4c51-b6f2-382756dbec34.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2289.929595] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9327bf37-1124-49bf-a2a1-87f8a23a5cbd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.949073] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2289.949073] env[62684]: value = "task-2053735" [ 2289.949073] env[62684]: _type = "Task" [ 2289.949073] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2289.961956] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053735, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2289.987247] env[62684]: DEBUG oslo_vmware.api [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Task: {'id': task-2053733, 'name': ReconfigVM_Task, 'duration_secs': 0.351816} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2289.987525] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Reconfigured VM instance instance-0000006e to attach disk [datastore1] a9965f71-e965-4144-a64a-6ee43ad20fc0/a9965f71-e965-4144-a64a-6ee43ad20fc0.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2289.988262] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-79ba5fb7-1665-4284-a2dc-87b6c696fbce {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.996145] env[62684]: DEBUG oslo_vmware.api [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Waiting for the task: (returnval){ [ 2289.996145] env[62684]: value = "task-2053736" [ 2289.996145] env[62684]: _type = "Task" [ 2289.996145] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2290.004720] env[62684]: DEBUG oslo_vmware.api [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Task: {'id': task-2053736, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2290.054151] env[62684]: DEBUG oslo_vmware.api [None req-8815cba3-93ca-4953-8623-75a7dc2da31d tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053734, 'name': SuspendVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2290.154152] env[62684]: INFO nova.compute.claims [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2290.460725] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053735, 'name': ReconfigVM_Task, 'duration_secs': 0.160033} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2290.461246] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421403', 'volume_id': 'bd2d9701-64ca-4c51-b6f2-382756dbec34', 'name': 'volume-bd2d9701-64ca-4c51-b6f2-382756dbec34', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '587edf89-2ea0-4b89-8830-fa766b798398', 'attached_at': '', 'detached_at': '', 'volume_id': 'bd2d9701-64ca-4c51-b6f2-382756dbec34', 'serial': 'bd2d9701-64ca-4c51-b6f2-382756dbec34'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2290.461589] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1a22856a-161f-45e7-996b-373fbac76e87 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.469404] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2290.469404] env[62684]: value = "task-2053737" [ 2290.469404] env[62684]: _type = "Task" [ 2290.469404] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2290.477286] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053737, 'name': Rename_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2290.502481] env[62684]: DEBUG nova.compute.manager [req-460557c9-4348-4ccc-b4de-5528521f8812 req-637cab63-84e4-41a7-8fb8-7da6afbca263 service nova] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Received event network-changed-61adf5cc-1692-4079-b909-b15313ce9680 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2290.502707] env[62684]: DEBUG nova.compute.manager [req-460557c9-4348-4ccc-b4de-5528521f8812 req-637cab63-84e4-41a7-8fb8-7da6afbca263 service nova] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Refreshing instance network info cache due to event network-changed-61adf5cc-1692-4079-b909-b15313ce9680. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2290.502927] env[62684]: DEBUG oslo_concurrency.lockutils [req-460557c9-4348-4ccc-b4de-5528521f8812 req-637cab63-84e4-41a7-8fb8-7da6afbca263 service nova] Acquiring lock "refresh_cache-9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2290.503090] env[62684]: DEBUG oslo_concurrency.lockutils [req-460557c9-4348-4ccc-b4de-5528521f8812 req-637cab63-84e4-41a7-8fb8-7da6afbca263 service nova] Acquired lock "refresh_cache-9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2290.503274] env[62684]: DEBUG nova.network.neutron [req-460557c9-4348-4ccc-b4de-5528521f8812 req-637cab63-84e4-41a7-8fb8-7da6afbca263 service nova] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Refreshing network info cache for port 61adf5cc-1692-4079-b909-b15313ce9680 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2290.510525] env[62684]: DEBUG oslo_vmware.api [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Task: {'id': task-2053736, 'name': Rename_Task, 'duration_secs': 0.150215} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2290.511411] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2290.511672] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ecbf8403-e573-4b0d-b6ba-e9ab00d5a418 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.519111] env[62684]: DEBUG oslo_vmware.api [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Waiting for the task: (returnval){ [ 2290.519111] env[62684]: value = "task-2053738" [ 2290.519111] env[62684]: _type = "Task" [ 2290.519111] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2290.527882] env[62684]: DEBUG oslo_vmware.api [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Task: {'id': task-2053738, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2290.553153] env[62684]: DEBUG oslo_vmware.api [None req-8815cba3-93ca-4953-8623-75a7dc2da31d tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053734, 'name': SuspendVM_Task, 'duration_secs': 0.65528} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2290.553493] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8815cba3-93ca-4953-8623-75a7dc2da31d tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Suspended the VM {{(pid=62684) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 2290.553694] env[62684]: DEBUG nova.compute.manager [None req-8815cba3-93ca-4953-8623-75a7dc2da31d tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2290.554575] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8112699f-b8ac-43cf-858d-a68d27aab7a5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.660786] env[62684]: INFO nova.compute.resource_tracker [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Updating resource usage from migration cc16ccfa-7dfe-43f1-b442-519a0a058755 [ 2290.760695] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-287a345c-a5f0-4158-8b82-afd04b10cfd2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.769067] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d48d6d-b319-4066-aa21-b742786fe180 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.798572] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccdc673d-d727-4244-add4-9b48ad25290b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.806636] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d81bc4-89ba-4041-84fa-aa4441e4cb6c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.821931] env[62684]: DEBUG nova.compute.provider_tree [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f 
{{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2290.980076] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053737, 'name': Rename_Task, 'duration_secs': 0.180519} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2290.980640] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2290.980957] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f26a806e-b958-49ba-b55a-3fa72c22e39e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.988033] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2290.988033] env[62684]: value = "task-2053739" [ 2290.988033] env[62684]: _type = "Task" [ 2290.988033] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2290.996629] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053739, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2291.028986] env[62684]: DEBUG oslo_vmware.api [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Task: {'id': task-2053738, 'name': PowerOnVM_Task, 'duration_secs': 0.49289} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2291.029497] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2291.029718] env[62684]: INFO nova.compute.manager [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Took 10.15 seconds to spawn the instance on the hypervisor. 
[ 2291.030128] env[62684]: DEBUG nova.compute.manager [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2291.030721] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f3e154d-70d2-4f2e-8f25-09539cbb1232 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.249398] env[62684]: DEBUG nova.network.neutron [req-460557c9-4348-4ccc-b4de-5528521f8812 req-637cab63-84e4-41a7-8fb8-7da6afbca263 service nova] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Updated VIF entry in instance network info cache for port 61adf5cc-1692-4079-b909-b15313ce9680. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2291.249901] env[62684]: DEBUG nova.network.neutron [req-460557c9-4348-4ccc-b4de-5528521f8812 req-637cab63-84e4-41a7-8fb8-7da6afbca263 service nova] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Updating instance_info_cache with network_info: [{"id": "61adf5cc-1692-4079-b909-b15313ce9680", "address": "fa:16:3e:17:9f:65", "network": {"id": "4142ba34-c2e0-4a22-a8dd-be06ba98c6e5", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1627792019-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0e0f0e1dc834134913bd742fa99b52f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap61adf5cc-16", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2291.324765] env[62684]: DEBUG nova.scheduler.client.report [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2291.499176] env[62684]: DEBUG oslo_vmware.api [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053739, 'name': PowerOnVM_Task, 'duration_secs': 0.473151} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2291.499546] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2291.499735] env[62684]: DEBUG nova.compute.manager [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2291.500626] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8fdefed-c861-4520-920f-6076d6d7fc8c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.551042] env[62684]: INFO nova.compute.manager [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Took 15.59 seconds to build instance. [ 2291.752707] env[62684]: DEBUG oslo_concurrency.lockutils [req-460557c9-4348-4ccc-b4de-5528521f8812 req-637cab63-84e4-41a7-8fb8-7da6afbca263 service nova] Releasing lock "refresh_cache-9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2291.809265] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquiring lock "9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2291.828818] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.180s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2291.829063] env[62684]: INFO nova.compute.manager [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Migrating [ 2291.835957] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.124s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2291.836169] env[62684]: DEBUG nova.objects.instance [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lazy-loading 'resources' on Instance uuid 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1 
{{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2292.012688] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e61a1277-4d7c-404e-93b9-952813a73aff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "b262673b-e4d3-48d8-9f93-6c60d48ae29d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2292.012962] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e61a1277-4d7c-404e-93b9-952813a73aff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "b262673b-e4d3-48d8-9f93-6c60d48ae29d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2292.013197] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e61a1277-4d7c-404e-93b9-952813a73aff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "b262673b-e4d3-48d8-9f93-6c60d48ae29d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2292.013389] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e61a1277-4d7c-404e-93b9-952813a73aff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "b262673b-e4d3-48d8-9f93-6c60d48ae29d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2292.013562] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e61a1277-4d7c-404e-93b9-952813a73aff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "b262673b-e4d3-48d8-9f93-6c60d48ae29d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2292.015936] env[62684]: INFO nova.compute.manager [None req-e61a1277-4d7c-404e-93b9-952813a73aff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Terminating instance [ 2292.023127] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2292.023393] env[62684]: DEBUG nova.compute.manager [None req-e61a1277-4d7c-404e-93b9-952813a73aff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2292.023594] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e61a1277-4d7c-404e-93b9-952813a73aff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2292.024469] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc354057-c3b7-4ee0-90b7-d7201d184065 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.032675] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e61a1277-4d7c-404e-93b9-952813a73aff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2292.032916] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-866f89ba-77be-482c-9efc-faa27b6febb1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.052125] env[62684]: DEBUG oslo_concurrency.lockutils [None req-831adba9-8912-49c9-a127-f2d368021a43 tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Lock "a9965f71-e965-4144-a64a-6ee43ad20fc0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.101s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2292.164434] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e61a1277-4d7c-404e-93b9-952813a73aff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2292.165475] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e61a1277-4d7c-404e-93b9-952813a73aff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2292.165475] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e61a1277-4d7c-404e-93b9-952813a73aff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Deleting the datastore file [datastore1] b262673b-e4d3-48d8-9f93-6c60d48ae29d {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2292.165475] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-04b04cc8-37d5-4f24-9384-9d3732fa4293 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.172156] env[62684]: DEBUG oslo_vmware.api [None req-e61a1277-4d7c-404e-93b9-952813a73aff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2292.172156] env[62684]: value = "task-2053741" [ 2292.172156] env[62684]: _type = "Task" [ 2292.172156] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2292.180304] env[62684]: DEBUG oslo_vmware.api [None req-e61a1277-4d7c-404e-93b9-952813a73aff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053741, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2292.344886] env[62684]: DEBUG nova.objects.instance [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lazy-loading 'numa_topology' on Instance uuid 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2292.346307] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "refresh_cache-57537508-06e7-43a4-95c5-c4399b8bf93f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2292.346511] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired lock "refresh_cache-57537508-06e7-43a4-95c5-c4399b8bf93f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2292.346703] env[62684]: DEBUG nova.network.neutron [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2292.681859] env[62684]: DEBUG oslo_vmware.api [None req-e61a1277-4d7c-404e-93b9-952813a73aff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053741, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.409585} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2292.682215] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e61a1277-4d7c-404e-93b9-952813a73aff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2292.682344] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e61a1277-4d7c-404e-93b9-952813a73aff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2292.682566] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e61a1277-4d7c-404e-93b9-952813a73aff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2292.682758] env[62684]: INFO nova.compute.manager [None req-e61a1277-4d7c-404e-93b9-952813a73aff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Took 0.66 seconds to destroy the instance on the hypervisor. [ 2292.683027] env[62684]: DEBUG oslo.service.loopingcall [None req-e61a1277-4d7c-404e-93b9-952813a73aff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2292.683242] env[62684]: DEBUG nova.compute.manager [-] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2292.683339] env[62684]: DEBUG nova.network.neutron [-] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2292.851755] env[62684]: DEBUG nova.objects.base [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Object Instance<9f1e9ae9-c082-4fbe-bd21-6e14e40962c1> lazy-loaded attributes: resources,numa_topology {{(pid=62684) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2292.872340] env[62684]: DEBUG oslo_concurrency.lockutils [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Acquiring lock "a9965f71-e965-4144-a64a-6ee43ad20fc0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2292.872615] env[62684]: DEBUG oslo_concurrency.lockutils [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Lock "a9965f71-e965-4144-a64a-6ee43ad20fc0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2292.872839] env[62684]: DEBUG oslo_concurrency.lockutils [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Acquiring lock "a9965f71-e965-4144-a64a-6ee43ad20fc0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2292.876577] env[62684]: DEBUG oslo_concurrency.lockutils [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Lock "a9965f71-e965-4144-a64a-6ee43ad20fc0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2292.876820] env[62684]: DEBUG oslo_concurrency.lockutils [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Lock "a9965f71-e965-4144-a64a-6ee43ad20fc0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.004s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2292.878840] env[62684]: INFO nova.compute.manager [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Terminating instance [ 2292.882077] env[62684]: DEBUG nova.compute.manager [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2292.882284] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2292.883164] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c0854bf-a710-4645-8c7a-faa39573f34c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.893410] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2292.893648] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca7b3078-281a-4f6e-806b-73732ca81145 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.899832] env[62684]: DEBUG oslo_vmware.api [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Waiting for the task: (returnval){ [ 2292.899832] env[62684]: value = "task-2053742" [ 2292.899832] env[62684]: _type = "Task" [ 2292.899832] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2292.912796] env[62684]: DEBUG oslo_vmware.api [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Task: {'id': task-2053742, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2292.994664] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39fa080b-34ab-4a07-b833-e5197bc4e386 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.000804] env[62684]: DEBUG nova.compute.manager [req-70341aa4-8ce3-40f0-bb46-8f4038927470 req-2bce8e19-53a8-4d03-b1fa-43bfe298b23d service nova] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Received event network-vif-deleted-38cf4164-d6e1-4c83-a587-36f2e19b13e3 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2293.001088] env[62684]: INFO nova.compute.manager [req-70341aa4-8ce3-40f0-bb46-8f4038927470 req-2bce8e19-53a8-4d03-b1fa-43bfe298b23d service nova] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Neutron deleted interface 38cf4164-d6e1-4c83-a587-36f2e19b13e3; detaching it from the instance and deleting it from the info cache [ 2293.001309] env[62684]: DEBUG nova.network.neutron [req-70341aa4-8ce3-40f0-bb46-8f4038927470 req-2bce8e19-53a8-4d03-b1fa-43bfe298b23d service nova] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2293.005636] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84a866dd-a3f3-4402-8e9a-0e7396e48f3c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.041252] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d4beb0-2520-430b-9e9a-3b3b32813bdc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.051536] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a04f6b23-4a55-4179-b286-9712281930ce {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.066831] env[62684]: DEBUG nova.compute.provider_tree [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2293.162216] env[62684]: DEBUG nova.network.neutron [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Updating instance_info_cache with network_info: [{"id": "0a96e2ce-2335-44e2-940d-26d3afbafa3a", "address": "fa:16:3e:6d:b8:02", "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-120070593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": 
false, "tenant_id": "263c101fcc5e493789b79dfd1ba97cc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a96e2ce-23", "ovs_interfaceid": "0a96e2ce-2335-44e2-940d-26d3afbafa3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2293.410600] env[62684]: DEBUG oslo_vmware.api [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Task: {'id': task-2053742, 'name': PowerOffVM_Task, 'duration_secs': 0.193862} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2293.410835] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2293.411018] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2293.411316] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-291bb269-aff2-4dcb-8cf1-6011b97e4fb4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.472853] env[62684]: DEBUG nova.network.neutron [-] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2293.505664] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bb3fe40a-55d7-42c1-8dd8-040bd519911a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.515048] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2293.515048] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2293.515048] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a 
tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Deleting the datastore file [datastore1] a9965f71-e965-4144-a64a-6ee43ad20fc0 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2293.515302] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-409a7025-39f9-411f-a2a2-f9aea251d27f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.521284] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc032171-b159-4d65-82e8-ffdd4cf3d897 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.533111] env[62684]: DEBUG oslo_vmware.api [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Waiting for the task: (returnval){ [ 2293.533111] env[62684]: value = "task-2053744" [ 2293.533111] env[62684]: _type = "Task" [ 2293.533111] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2293.542375] env[62684]: DEBUG oslo_vmware.api [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Task: {'id': task-2053744, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2293.552969] env[62684]: DEBUG nova.compute.manager [req-70341aa4-8ce3-40f0-bb46-8f4038927470 req-2bce8e19-53a8-4d03-b1fa-43bfe298b23d service nova] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Detach interface failed, port_id=38cf4164-d6e1-4c83-a587-36f2e19b13e3, reason: Instance b262673b-e4d3-48d8-9f93-6c60d48ae29d could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2293.573726] env[62684]: DEBUG nova.scheduler.client.report [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2293.664745] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Releasing lock "refresh_cache-57537508-06e7-43a4-95c5-c4399b8bf93f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2293.975259] env[62684]: INFO nova.compute.manager [-] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Took 1.29 seconds to deallocate network for instance. 
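The teardown of instance a9965f71-e965-4144-a64a-6ee43ad20fc0 above follows the vmwareapi driver's usual invoke-and-poll pattern: PowerOffVM_Task and DeleteDatastoreFile_Task each return a vCenter task that oslo.vmware polls until completion (the "Waiting for the task ... to complete" and "progress is 0%" lines), while UnregisterVM is synchronous. A minimal sketch of that pattern, assuming an already-created oslo_vmware.api.VMwareAPISession named `session` plus resolved `vm_ref`, `ds_path` and `dc_ref` values (all hypothetical here; this is illustrative, not Nova's actual code):

```python
# Sketch of the invoke-and-poll sequence visible in the log:
# PowerOffVM_Task -> UnregisterVM -> DeleteDatastoreFile_Task.
from oslo_vmware import exceptions as vexc


def power_off_and_delete(session, vm_ref, ds_path, dc_ref):
    # Asynchronous vCenter call: returns a task ref that wait_for_task
    # polls, producing the "Task: {'id': task-...}" progress lines.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # Unregistering the VM is synchronous; there is no task to poll.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Deleting the backing files goes through the FileManager and is
    # again a task (DeleteDatastoreFile_Task in the log).
    file_manager = session.vim.service_content.fileManager
    try:
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path,
                                  datacenter=dc_ref)
        session.wait_for_task(task)
    except vexc.FileNotFoundException:
        # Files already gone; treat the delete as successful.
        pass
```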
[ 2294.044332] env[62684]: DEBUG oslo_vmware.api [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Task: {'id': task-2053744, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148894} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2294.044602] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2294.044807] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2294.045007] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2294.045206] env[62684]: INFO nova.compute.manager [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Took 1.16 seconds to destroy the instance on the hypervisor. [ 2294.045466] env[62684]: DEBUG oslo.service.loopingcall [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2294.045683] env[62684]: DEBUG nova.compute.manager [-] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2294.045787] env[62684]: DEBUG nova.network.neutron [-] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2294.078364] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.242s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2294.081048] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 2.058s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2294.081202] env[62684]: DEBUG nova.objects.instance [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62684) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2294.481582] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e61a1277-4d7c-404e-93b9-952813a73aff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2294.592476] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a138beae-22ab-4dd9-acea-94e3bc282f48 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lock "9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 24.349s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2294.593370] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lock "9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 2.784s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2294.593555] env[62684]: INFO nova.compute.manager [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Unshelving [ 2294.786925] env[62684]: DEBUG nova.network.neutron [-] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Updating 
instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2295.034499] env[62684]: DEBUG nova.compute.manager [req-5ece7a86-889e-4410-9765-3d0bc612807b req-b1f5642f-83e8-4220-a831-dacbb6103abf service nova] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Received event network-vif-deleted-3a0c1e2a-c83c-475e-889b-d951a26f7708 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2295.091906] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9fb70f38-9796-4b88-a700-f8ad145d19c6 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2295.093038] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e61a1277-4d7c-404e-93b9-952813a73aff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.612s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2295.093179] env[62684]: DEBUG nova.objects.instance [None req-e61a1277-4d7c-404e-93b9-952813a73aff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lazy-loading 'resources' on Instance uuid b262673b-e4d3-48d8-9f93-6c60d48ae29d {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2295.182991] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0101a59-7a82-4719-b928-8e9aa750c7df {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.203601] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Updating instance '57537508-06e7-43a4-95c5-c4399b8bf93f' progress to 0 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2295.289748] env[62684]: INFO nova.compute.manager [-] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Took 1.24 seconds to deallocate network for instance. 
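The recurring lock lines above ('Lock "compute_resources" acquired ... waited 2.058s', '... "released" ... held 1.011s') are emitted by oslo.concurrency, which wraps the critical section and logs how long each caller waited for, and then held, the named semaphore. A small illustration of that API, reusing the "compute_resources" name from the log; the decorated function body is a placeholder, not the resource tracker's real code:

```python
from oslo_concurrency import lockutils


# Same semaphore name as in the log; the body is a stand-in.
@lockutils.synchronized('compute_resources')
def update_usage(instance):
    # Concurrent callers log 'Acquiring lock "compute_resources" ...'
    # and block here until the current holder returns, at which point
    # the waited/held durations are logged.
    pass
```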
[ 2295.617558] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2295.688026] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4015bfe-564a-4ca0-b965-a5d94302334d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.696526] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e86cd926-cb9e-4a39-9d55-5efa04e6e2cd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.726285] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2295.726754] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-815c0c94-1d1a-4ead-9969-a1cb6d654ad7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.728710] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de4adfe-0469-40d4-a176-ad9880bb0e3c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.747727] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc0791ec-fdc2-4a8d-886e-9e56d9bff484 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.752877] env[62684]: DEBUG oslo_vmware.api [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2295.752877] env[62684]: value = "task-2053745" [ 2295.752877] env[62684]: _type = "Task" [ 2295.752877] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2295.764132] env[62684]: DEBUG nova.compute.provider_tree [None req-e61a1277-4d7c-404e-93b9-952813a73aff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2295.770712] env[62684]: DEBUG oslo_vmware.api [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053745, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2295.795745] env[62684]: DEBUG oslo_concurrency.lockutils [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2296.265381] env[62684]: DEBUG oslo_vmware.api [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053745, 'name': PowerOffVM_Task, 'duration_secs': 0.187775} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2296.265670] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2296.265827] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Updating instance '57537508-06e7-43a4-95c5-c4399b8bf93f' progress to 17 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2296.269735] env[62684]: DEBUG nova.scheduler.client.report [None req-e61a1277-4d7c-404e-93b9-952813a73aff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2296.775734] env[62684]: DEBUG nova.virt.hardware [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2296.775988] env[62684]: DEBUG nova.virt.hardware [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 
tempest-ServerActionsTestJSON-275262807-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2296.776169] env[62684]: DEBUG nova.virt.hardware [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2296.776369] env[62684]: DEBUG nova.virt.hardware [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2296.776519] env[62684]: DEBUG nova.virt.hardware [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2296.776671] env[62684]: DEBUG nova.virt.hardware [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2296.776878] env[62684]: DEBUG nova.virt.hardware [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2296.777053] env[62684]: DEBUG nova.virt.hardware [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2296.777231] env[62684]: DEBUG nova.virt.hardware [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2296.777401] env[62684]: DEBUG nova.virt.hardware [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2296.777585] env[62684]: DEBUG nova.virt.hardware [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2296.783431] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e61a1277-4d7c-404e-93b9-952813a73aff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.690s {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2296.785445] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f497d6c-5565-45bb-8c32-af495cab8885 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2296.795694] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.178s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2296.795933] env[62684]: DEBUG nova.objects.instance [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lazy-loading 'pci_requests' on Instance uuid 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2296.804227] env[62684]: DEBUG oslo_vmware.api [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2296.804227] env[62684]: value = "task-2053746" [ 2296.804227] env[62684]: _type = "Task" [ 2296.804227] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2296.814478] env[62684]: DEBUG oslo_vmware.api [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053746, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2296.815478] env[62684]: INFO nova.scheduler.client.report [None req-e61a1277-4d7c-404e-93b9-952813a73aff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Deleted allocations for instance b262673b-e4d3-48d8-9f93-6c60d48ae29d [ 2297.302180] env[62684]: DEBUG nova.objects.instance [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lazy-loading 'numa_topology' on Instance uuid 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2297.314613] env[62684]: DEBUG oslo_vmware.api [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053746, 'name': ReconfigVM_Task, 'duration_secs': 0.421696} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2297.314921] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Updating instance '57537508-06e7-43a4-95c5-c4399b8bf93f' progress to 33 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2297.322022] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e61a1277-4d7c-404e-93b9-952813a73aff tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "b262673b-e4d3-48d8-9f93-6c60d48ae29d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.309s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2297.804564] env[62684]: INFO nova.compute.claims [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2297.822054] env[62684]: DEBUG nova.virt.hardware [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2297.822054] env[62684]: DEBUG nova.virt.hardware [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2297.822054] env[62684]: DEBUG nova.virt.hardware [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2297.822272] env[62684]: DEBUG nova.virt.hardware [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2297.822272] env[62684]: DEBUG nova.virt.hardware [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2297.822418] env[62684]: DEBUG nova.virt.hardware [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 
tempest-ServerActionsTestJSON-275262807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2297.822627] env[62684]: DEBUG nova.virt.hardware [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2297.822785] env[62684]: DEBUG nova.virt.hardware [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2297.822950] env[62684]: DEBUG nova.virt.hardware [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2297.823129] env[62684]: DEBUG nova.virt.hardware [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2297.823308] env[62684]: DEBUG nova.virt.hardware [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2297.828603] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Reconfiguring VM instance instance-0000003d to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2297.829141] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-850f045c-7cf6-4e0c-a4dd-78360b99ac1c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2297.850563] env[62684]: DEBUG oslo_vmware.api [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2297.850563] env[62684]: value = "task-2053747" [ 2297.850563] env[62684]: _type = "Task" [ 2297.850563] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2297.859325] env[62684]: DEBUG oslo_vmware.api [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053747, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2297.869485] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "69d26980-f42d-4d35-8de3-a85d7a6f0a11" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2297.869748] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "69d26980-f42d-4d35-8de3-a85d7a6f0a11" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2298.360601] env[62684]: DEBUG oslo_vmware.api [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053747, 'name': ReconfigVM_Task, 'duration_secs': 0.167636} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2298.360920] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Reconfigured VM instance instance-0000003d to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2298.361721] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff15cf2-7f82-4663-9820-ea7e0aa1cfbc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.377475] env[62684]: DEBUG nova.compute.manager [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2298.386805] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] 57537508-06e7-43a4-95c5-c4399b8bf93f/57537508-06e7-43a4-95c5-c4399b8bf93f.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2298.387637] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-502b46fc-e77d-4153-a93f-a9160f9ccf3a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.406683] env[62684]: DEBUG oslo_vmware.api [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2298.406683] env[62684]: value = "task-2053748" [ 2298.406683] env[62684]: _type = "Task" [ 2298.406683] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2298.415576] env[62684]: DEBUG oslo_vmware.api [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053748, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2298.908034] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2298.915767] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a08d2d1d-638d-4d99-bc75-9bc66e8dcef7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.921348] env[62684]: DEBUG oslo_vmware.api [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053748, 'name': ReconfigVM_Task, 'duration_secs': 0.276625} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2298.921979] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Reconfigured VM instance instance-0000003d to attach disk [datastore1] 57537508-06e7-43a4-95c5-c4399b8bf93f/57537508-06e7-43a4-95c5-c4399b8bf93f.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2298.922309] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Updating instance '57537508-06e7-43a4-95c5-c4399b8bf93f' progress to 50 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2298.928380] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23aaabba-ddc8-40da-a074-5bc6ec92152d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.958970] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8634fe97-fe95-4d98-9929-804537a5790d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.967160] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b97c7b9-1cfe-4a16-86c1-dd5e39a4d932 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.981086] env[62684]: DEBUG nova.compute.provider_tree [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2299.430698] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f6865a-d987-42f8-9418-2cd319bf8401 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2299.449751] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70810c0d-3207-4d1b-af91-055b26eec3af {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2299.467290] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Updating instance '57537508-06e7-43a4-95c5-c4399b8bf93f' progress to 67 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2299.483877] env[62684]: DEBUG nova.scheduler.client.report [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 
'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2299.988014] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.192s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2299.990079] env[62684]: DEBUG oslo_concurrency.lockutils [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.194s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2299.990345] env[62684]: DEBUG nova.objects.instance [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Lazy-loading 'resources' on Instance uuid a9965f71-e965-4144-a64a-6ee43ad20fc0 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2300.024353] env[62684]: DEBUG nova.network.neutron [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Port 0a96e2ce-2335-44e2-940d-26d3afbafa3a binding to destination host cpu-1 is already ACTIVE {{(pid=62684) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2300.027966] env[62684]: INFO nova.network.neutron [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Updating port 61adf5cc-1692-4079-b909-b15313ce9680 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2300.613204] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f854dbbe-567e-4609-b7c6-e114d6edd506 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2300.621768] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d6ff9a-1be0-46e2-b045-be91d01d71e6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2300.653810] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6dd2c84-9302-4b84-bfa1-c992e7d9039c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2300.662019] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2720d728-a268-4595-a03f-cd7aec0dd4d7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
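The inventory dict the report client keeps logging for provider c23c281e-ec1f-4876-972e-a98655f2084f translates into usable capacity via the standard placement formula, capacity = (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A quick check with the exact numbers from the log:

```python
# Effective capacity implied by the logged inventory data.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'max_unit': 16, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'max_unit': 155, 'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity={capacity:g}, per-allocation cap={inv['max_unit']}")
# VCPU: capacity=192, per-allocation cap=16
# MEMORY_MB: capacity=196078, per-allocation cap=65530
# DISK_GB: capacity=400, per-allocation cap=155
```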
[ 2300.676059] env[62684]: DEBUG nova.compute.provider_tree [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2301.049266] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "57537508-06e7-43a4-95c5-c4399b8bf93f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2301.049514] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "57537508-06e7-43a4-95c5-c4399b8bf93f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2301.049690] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "57537508-06e7-43a4-95c5-c4399b8bf93f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2301.179639] env[62684]: DEBUG nova.scheduler.client.report [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2301.684709] env[62684]: DEBUG oslo_concurrency.lockutils [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.694s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2301.687156] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.779s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2301.689210] env[62684]: INFO nova.compute.claims [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 
tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2301.702339] env[62684]: INFO nova.scheduler.client.report [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Deleted allocations for instance a9965f71-e965-4144-a64a-6ee43ad20fc0 [ 2302.084289] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "refresh_cache-57537508-06e7-43a4-95c5-c4399b8bf93f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2302.084483] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired lock "refresh_cache-57537508-06e7-43a4-95c5-c4399b8bf93f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2302.084669] env[62684]: DEBUG nova.network.neutron [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2302.208860] env[62684]: DEBUG oslo_concurrency.lockutils [None req-43e0f947-d632-4e47-bbec-9d7aca7e585a tempest-ServerAddressesNegativeTestJSON-1933043178 tempest-ServerAddressesNegativeTestJSON-1933043178-project-member] Lock "a9965f71-e965-4144-a64a-6ee43ad20fc0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.336s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2302.793202] env[62684]: DEBUG nova.network.neutron [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Updating instance_info_cache with network_info: [{"id": "0a96e2ce-2335-44e2-940d-26d3afbafa3a", "address": "fa:16:3e:6d:b8:02", "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-120070593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "263c101fcc5e493789b79dfd1ba97cc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a96e2ce-23", "ovs_interfaceid": "0a96e2ce-2335-44e2-940d-26d3afbafa3a", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2302.797860] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d605faaf-44b8-41c9-a335-f1e2cc94d7fa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2302.808299] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27fd788c-21af-48ef-bde9-9557d5fd2a80 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2302.839302] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7b77a91-0b7b-4281-8ba8-af6fce42a681 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2302.846982] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20345584-cf0b-44e1-ad1e-30f9e2b6589d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2302.860515] env[62684]: DEBUG nova.compute.provider_tree [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2303.297916] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Releasing lock "refresh_cache-57537508-06e7-43a4-95c5-c4399b8bf93f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2303.364301] env[62684]: DEBUG nova.scheduler.client.report [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2303.822721] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50473123-c464-4fb2-b66c-88df5c9c40a4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.841822] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e65177-d5f3-4e01-aaa4-d5412cdf24b6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.848699] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 
tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Updating instance '57537508-06e7-43a4-95c5-c4399b8bf93f' progress to 83 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2303.868451] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.181s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2303.868957] env[62684]: DEBUG nova.compute.manager [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2303.929613] env[62684]: DEBUG nova.compute.manager [req-081744f3-e03c-4bac-8234-07b9363ab3c5 req-ef3cb699-d294-452c-99dc-773e5eba3615 service nova] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Received event network-vif-plugged-61adf5cc-1692-4079-b909-b15313ce9680 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2303.929909] env[62684]: DEBUG oslo_concurrency.lockutils [req-081744f3-e03c-4bac-8234-07b9363ab3c5 req-ef3cb699-d294-452c-99dc-773e5eba3615 service nova] Acquiring lock "9f1e9ae9-c082-4fbe-bd21-6e14e40962c1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2303.930061] env[62684]: DEBUG oslo_concurrency.lockutils [req-081744f3-e03c-4bac-8234-07b9363ab3c5 req-ef3cb699-d294-452c-99dc-773e5eba3615 service nova] Lock "9f1e9ae9-c082-4fbe-bd21-6e14e40962c1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2303.930289] env[62684]: DEBUG oslo_concurrency.lockutils [req-081744f3-e03c-4bac-8234-07b9363ab3c5 req-ef3cb699-d294-452c-99dc-773e5eba3615 service nova] Lock "9f1e9ae9-c082-4fbe-bd21-6e14e40962c1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2303.930492] env[62684]: DEBUG nova.compute.manager [req-081744f3-e03c-4bac-8234-07b9363ab3c5 req-ef3cb699-d294-452c-99dc-773e5eba3615 service nova] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] No waiting events found dispatching network-vif-plugged-61adf5cc-1692-4079-b909-b15313ce9680 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2303.930665] env[62684]: WARNING nova.compute.manager [req-081744f3-e03c-4bac-8234-07b9363ab3c5 req-ef3cb699-d294-452c-99dc-773e5eba3615 service nova] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Received unexpected event network-vif-plugged-61adf5cc-1692-4079-b909-b15313ce9680 for instance with vm_state shelved_offloaded and task_state spawning. 
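The instance_info_cache entries logged for instance 57537508-06e7-43a4-95c5-c4399b8bf93f are plain JSON, so pulling the addresses out of one is straightforward. A short sketch, with the `network_info` literal copied from the log but trimmed to the fields actually used:

```python
# Extract fixed and floating IPs from an instance_info_cache entry as
# logged above (structure abbreviated to what this snippet reads).
network_info = [{
    "id": "0a96e2ce-2335-44e2-940d-26d3afbafa3a",
    "address": "fa:16:3e:6d:b8:02",
    "network": {
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{
                "address": "192.168.128.14",
                "type": "fixed",
                "floating_ips": [{"address": "10.180.180.232", "type": "floating"}],
            }],
        }],
    },
}]

for vif in network_info:
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            floats = [f["address"] for f in ip.get("floating_ips", [])]
            print(vif["id"], ip["address"], floats)
# 0a96e2ce-2335-44e2-940d-26d3afbafa3a 192.168.128.14 ['10.180.180.232']
```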
[ 2304.057927] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquiring lock "refresh_cache-9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2304.058202] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquired lock "refresh_cache-9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2304.058389] env[62684]: DEBUG nova.network.neutron [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2304.355391] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2304.355711] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2bc3b9e6-170f-4f3f-8010-f8f68c6d7991 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.364103] env[62684]: DEBUG oslo_vmware.api [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2304.364103] env[62684]: value = "task-2053749" [ 2304.364103] env[62684]: _type = "Task" [ 2304.364103] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2304.372163] env[62684]: DEBUG oslo_vmware.api [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053749, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2304.374385] env[62684]: DEBUG nova.compute.utils [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2304.375705] env[62684]: DEBUG nova.compute.manager [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2304.375870] env[62684]: DEBUG nova.network.neutron [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2304.417208] env[62684]: DEBUG nova.policy [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '96b96927115d49f2a04342784717e58e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '607a0aa1049640d882d7dd490f5f98ea', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2304.759153] env[62684]: DEBUG nova.network.neutron [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Successfully created port: 0d202452-add9-4ae7-b035-6554f287d049 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2304.877353] env[62684]: DEBUG oslo_vmware.api [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053749, 'name': PowerOnVM_Task, 'duration_secs': 0.403232} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2304.880570] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2304.880838] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c64f7e-61eb-44f8-88bd-af55eabb588d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Updating instance '57537508-06e7-43a4-95c5-c4399b8bf93f' progress to 100 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2304.885661] env[62684]: DEBUG nova.compute.manager [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2304.993301] env[62684]: DEBUG nova.network.neutron [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Updating instance_info_cache with network_info: [{"id": "61adf5cc-1692-4079-b909-b15313ce9680", "address": "fa:16:3e:17:9f:65", "network": {"id": "4142ba34-c2e0-4a22-a8dd-be06ba98c6e5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1627792019-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0e0f0e1dc834134913bd742fa99b52f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61adf5cc-16", "ovs_interfaceid": "61adf5cc-1692-4079-b909-b15313ce9680", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2305.499307] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Releasing lock "refresh_cache-9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2305.525692] env[62684]: DEBUG nova.virt.hardware [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='046c52228a58dc3f6b7e324b336a963b',container_format='bare',created_at=2025-01-10T07:55:10Z,direct_url=,disk_format='vmdk',id=a08ef668-cfc2-427d-9c76-1c1bbb64d819,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-1887203471-shelved',owner='f0e0f0e1dc834134913bd742fa99b52f',properties=ImageMetaProps,protected=,size=31666688,status='active',tags=,updated_at=2025-01-10T07:55:25Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2305.525961] env[62684]: DEBUG nova.virt.hardware [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2305.526145] env[62684]: DEBUG nova.virt.hardware [None req-e6b06698-6751-44e4-a056-012254f17c5c 
tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2305.526379] env[62684]: DEBUG nova.virt.hardware [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2305.526543] env[62684]: DEBUG nova.virt.hardware [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2305.526697] env[62684]: DEBUG nova.virt.hardware [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2305.526914] env[62684]: DEBUG nova.virt.hardware [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2305.527096] env[62684]: DEBUG nova.virt.hardware [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2305.527273] env[62684]: DEBUG nova.virt.hardware [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2305.527457] env[62684]: DEBUG nova.virt.hardware [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2305.527643] env[62684]: DEBUG nova.virt.hardware [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2305.528525] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ecf1d7a-0933-49bc-80f4-38b15791673c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2305.537374] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fab2554-0371-4201-b26d-779d86539998 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2305.551860] env[62684]: DEBUG 
nova.virt.vmwareapi.vmops [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:9f:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f2e45023-22b5-458b-826e-9b7eb69ba028', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '61adf5cc-1692-4079-b909-b15313ce9680', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2305.559210] env[62684]: DEBUG oslo.service.loopingcall [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2305.559795] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2305.560045] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e3da218c-9795-4ee3-ad8a-31b2ce945d8e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2305.579235] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2305.579235] env[62684]: value = "task-2053750" [ 2305.579235] env[62684]: _type = "Task" [ 2305.579235] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2305.588513] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053750, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2305.898669] env[62684]: DEBUG nova.compute.manager [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2305.924300] env[62684]: DEBUG nova.virt.hardware [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2305.924709] env[62684]: DEBUG nova.virt.hardware [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2305.924999] env[62684]: DEBUG nova.virt.hardware [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2305.925293] env[62684]: DEBUG nova.virt.hardware [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2305.925522] env[62684]: DEBUG nova.virt.hardware [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2305.925791] env[62684]: DEBUG nova.virt.hardware [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2305.926429] env[62684]: DEBUG nova.virt.hardware [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2305.926507] env[62684]: DEBUG nova.virt.hardware [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2305.926720] env[62684]: DEBUG nova.virt.hardware [None 
req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2305.926938] env[62684]: DEBUG nova.virt.hardware [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2305.927204] env[62684]: DEBUG nova.virt.hardware [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2305.928368] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71302e6a-b026-4a74-ac84-fb3358f011b7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2305.939685] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64b9e72a-c7e2-4af5-8610-fcba438a20fa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2305.961199] env[62684]: DEBUG nova.compute.manager [req-5327c009-e14c-42ae-af5f-f8863ecb1038 req-7ff16458-c6e5-494d-b0e4-7ae99e6b2360 service nova] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Received event network-changed-61adf5cc-1692-4079-b909-b15313ce9680 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2305.961461] env[62684]: DEBUG nova.compute.manager [req-5327c009-e14c-42ae-af5f-f8863ecb1038 req-7ff16458-c6e5-494d-b0e4-7ae99e6b2360 service nova] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Refreshing instance network info cache due to event network-changed-61adf5cc-1692-4079-b909-b15313ce9680. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2305.961719] env[62684]: DEBUG oslo_concurrency.lockutils [req-5327c009-e14c-42ae-af5f-f8863ecb1038 req-7ff16458-c6e5-494d-b0e4-7ae99e6b2360 service nova] Acquiring lock "refresh_cache-9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2305.961932] env[62684]: DEBUG oslo_concurrency.lockutils [req-5327c009-e14c-42ae-af5f-f8863ecb1038 req-7ff16458-c6e5-494d-b0e4-7ae99e6b2360 service nova] Acquired lock "refresh_cache-9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2305.962131] env[62684]: DEBUG nova.network.neutron [req-5327c009-e14c-42ae-af5f-f8863ecb1038 req-7ff16458-c6e5-494d-b0e4-7ae99e6b2360 service nova] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Refreshing network info cache for port 61adf5cc-1692-4079-b909-b15313ce9680 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2306.089704] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053750, 'name': CreateVM_Task, 'duration_secs': 0.40682} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2306.089857] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2306.090529] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a08ef668-cfc2-427d-9c76-1c1bbb64d819" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2306.090708] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a08ef668-cfc2-427d-9c76-1c1bbb64d819" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2306.091114] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a08ef668-cfc2-427d-9c76-1c1bbb64d819" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2306.091378] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1f08277-36fe-4fd1-918c-789316156775 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2306.096435] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2306.096435] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bd0c8c-9d2c-08db-6545-4b185b6d875b" [ 2306.096435] env[62684]: _type = "Task" [ 2306.096435] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2306.105999] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bd0c8c-9d2c-08db-6545-4b185b6d875b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2306.608070] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a08ef668-cfc2-427d-9c76-1c1bbb64d819" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2306.608405] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Processing image a08ef668-cfc2-427d-9c76-1c1bbb64d819 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2306.608667] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a08ef668-cfc2-427d-9c76-1c1bbb64d819/a08ef668-cfc2-427d-9c76-1c1bbb64d819.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2306.608825] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a08ef668-cfc2-427d-9c76-1c1bbb64d819/a08ef668-cfc2-427d-9c76-1c1bbb64d819.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2306.609019] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2306.609298] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-19c9bfbe-b2e9-4d94-ad67-59c246f754b4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2306.618733] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2306.618935] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2306.619653] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4b1ab41-b22f-415c-85a3-88a2c5187428 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2306.625479] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2306.625479] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a351af-29a6-f60c-ee30-e3fbf35a3c8c" [ 2306.625479] env[62684]: _type = "Task" [ 2306.625479] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2306.633731] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a351af-29a6-f60c-ee30-e3fbf35a3c8c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2306.641351] env[62684]: DEBUG oslo_concurrency.lockutils [None req-24d4765b-e1bc-48d6-9df7-b5a42fd7d9e0 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "57537508-06e7-43a4-95c5-c4399b8bf93f" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2306.641594] env[62684]: DEBUG oslo_concurrency.lockutils [None req-24d4765b-e1bc-48d6-9df7-b5a42fd7d9e0 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "57537508-06e7-43a4-95c5-c4399b8bf93f" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2306.641783] env[62684]: DEBUG nova.compute.manager [None req-24d4765b-e1bc-48d6-9df7-b5a42fd7d9e0 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Going to confirm migration 5 {{(pid=62684) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 2306.753123] env[62684]: DEBUG nova.network.neutron [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Successfully updated port: 0d202452-add9-4ae7-b035-6554f287d049 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2306.899900] env[62684]: DEBUG nova.network.neutron [req-5327c009-e14c-42ae-af5f-f8863ecb1038 req-7ff16458-c6e5-494d-b0e4-7ae99e6b2360 service nova] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Updated VIF entry in instance network info cache for port 61adf5cc-1692-4079-b909-b15313ce9680. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2306.900373] env[62684]: DEBUG nova.network.neutron [req-5327c009-e14c-42ae-af5f-f8863ecb1038 req-7ff16458-c6e5-494d-b0e4-7ae99e6b2360 service nova] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Updating instance_info_cache with network_info: [{"id": "61adf5cc-1692-4079-b909-b15313ce9680", "address": "fa:16:3e:17:9f:65", "network": {"id": "4142ba34-c2e0-4a22-a8dd-be06ba98c6e5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1627792019-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0e0f0e1dc834134913bd742fa99b52f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61adf5cc-16", "ovs_interfaceid": "61adf5cc-1692-4079-b909-b15313ce9680", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2307.136536] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Preparing fetch location {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2307.136838] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Fetch image to [datastore2] OSTACK_IMG_a9ae0d98-5401-4100-89c8-876860774d57/OSTACK_IMG_a9ae0d98-5401-4100-89c8-876860774d57.vmdk {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2307.137146] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Downloading stream optimized image a08ef668-cfc2-427d-9c76-1c1bbb64d819 to [datastore2] OSTACK_IMG_a9ae0d98-5401-4100-89c8-876860774d57/OSTACK_IMG_a9ae0d98-5401-4100-89c8-876860774d57.vmdk on the data store datastore2 as vApp {{(pid=62684) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2307.137348] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Downloading image file data a08ef668-cfc2-427d-9c76-1c1bbb64d819 to the ESX as VM named 'OSTACK_IMG_a9ae0d98-5401-4100-89c8-876860774d57' {{(pid=62684) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2307.179045] env[62684]: DEBUG 
oslo_concurrency.lockutils [None req-24d4765b-e1bc-48d6-9df7-b5a42fd7d9e0 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "refresh_cache-57537508-06e7-43a4-95c5-c4399b8bf93f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2307.179787] env[62684]: DEBUG oslo_concurrency.lockutils [None req-24d4765b-e1bc-48d6-9df7-b5a42fd7d9e0 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired lock "refresh_cache-57537508-06e7-43a4-95c5-c4399b8bf93f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2307.179787] env[62684]: DEBUG nova.network.neutron [None req-24d4765b-e1bc-48d6-9df7-b5a42fd7d9e0 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2307.179787] env[62684]: DEBUG nova.objects.instance [None req-24d4765b-e1bc-48d6-9df7-b5a42fd7d9e0 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lazy-loading 'info_cache' on Instance uuid 57537508-06e7-43a4-95c5-c4399b8bf93f {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2307.206710] env[62684]: DEBUG oslo_vmware.rw_handles [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2307.206710] env[62684]: value = "resgroup-9" [ 2307.206710] env[62684]: _type = "ResourcePool" [ 2307.206710] env[62684]: }. {{(pid=62684) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2307.207117] env[62684]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-c7f183ab-8e49-4a37-a41e-bfcfb7193ff6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2307.233976] env[62684]: DEBUG oslo_vmware.rw_handles [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lease: (returnval){ [ 2307.233976] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527a28fd-7ab7-297f-a354-c771aa9fda8f" [ 2307.233976] env[62684]: _type = "HttpNfcLease" [ 2307.233976] env[62684]: } obtained for vApp import into resource pool (val){ [ 2307.233976] env[62684]: value = "resgroup-9" [ 2307.233976] env[62684]: _type = "ResourcePool" [ 2307.233976] env[62684]: }. {{(pid=62684) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2307.234288] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the lease: (returnval){ [ 2307.234288] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527a28fd-7ab7-297f-a354-c771aa9fda8f" [ 2307.234288] env[62684]: _type = "HttpNfcLease" [ 2307.234288] env[62684]: } to be ready. 
{{(pid=62684) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2307.242878] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2307.242878] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527a28fd-7ab7-297f-a354-c771aa9fda8f" [ 2307.242878] env[62684]: _type = "HttpNfcLease" [ 2307.242878] env[62684]: } is initializing. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2307.261796] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "refresh_cache-69d26980-f42d-4d35-8de3-a85d7a6f0a11" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2307.261985] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquired lock "refresh_cache-69d26980-f42d-4d35-8de3-a85d7a6f0a11" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2307.262155] env[62684]: DEBUG nova.network.neutron [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2307.402990] env[62684]: DEBUG oslo_concurrency.lockutils [req-5327c009-e14c-42ae-af5f-f8863ecb1038 req-7ff16458-c6e5-494d-b0e4-7ae99e6b2360 service nova] Releasing lock "refresh_cache-9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2307.532375] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7433cc9a-7715-4079-9d6e-1fe75a41a317 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "5b3668f3-219d-4304-bc9e-9b911762085d" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2307.532375] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7433cc9a-7715-4079-9d6e-1fe75a41a317 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "5b3668f3-219d-4304-bc9e-9b911762085d" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2307.743516] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2307.743516] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527a28fd-7ab7-297f-a354-c771aa9fda8f" [ 2307.743516] env[62684]: _type = "HttpNfcLease" [ 2307.743516] env[62684]: } is initializing. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2307.797165] env[62684]: DEBUG nova.network.neutron [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2307.937759] env[62684]: DEBUG nova.network.neutron [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Updating instance_info_cache with network_info: [{"id": "0d202452-add9-4ae7-b035-6554f287d049", "address": "fa:16:3e:3b:e0:ab", "network": {"id": "b24dd0c0-a394-4ca6-a79a-94535bc1df6f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2023102141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "607a0aa1049640d882d7dd490f5f98ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d202452-ad", "ovs_interfaceid": "0d202452-add9-4ae7-b035-6554f287d049", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2307.985925] env[62684]: DEBUG nova.compute.manager [req-8f780cdd-bb39-44c1-b677-53968e4a9024 req-6f435b9a-2ac3-47df-a4c1-851006b83ae5 service nova] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Received event network-vif-plugged-0d202452-add9-4ae7-b035-6554f287d049 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2307.986183] env[62684]: DEBUG oslo_concurrency.lockutils [req-8f780cdd-bb39-44c1-b677-53968e4a9024 req-6f435b9a-2ac3-47df-a4c1-851006b83ae5 service nova] Acquiring lock "69d26980-f42d-4d35-8de3-a85d7a6f0a11-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2307.986400] env[62684]: DEBUG oslo_concurrency.lockutils [req-8f780cdd-bb39-44c1-b677-53968e4a9024 req-6f435b9a-2ac3-47df-a4c1-851006b83ae5 service nova] Lock "69d26980-f42d-4d35-8de3-a85d7a6f0a11-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2307.986578] env[62684]: DEBUG oslo_concurrency.lockutils [req-8f780cdd-bb39-44c1-b677-53968e4a9024 req-6f435b9a-2ac3-47df-a4c1-851006b83ae5 service nova] Lock "69d26980-f42d-4d35-8de3-a85d7a6f0a11-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2307.986751] env[62684]: DEBUG nova.compute.manager [req-8f780cdd-bb39-44c1-b677-53968e4a9024 req-6f435b9a-2ac3-47df-a4c1-851006b83ae5 service nova] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] No waiting events found dispatching network-vif-plugged-0d202452-add9-4ae7-b035-6554f287d049 {{(pid=62684) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2307.986918] env[62684]: WARNING nova.compute.manager [req-8f780cdd-bb39-44c1-b677-53968e4a9024 req-6f435b9a-2ac3-47df-a4c1-851006b83ae5 service nova] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Received unexpected event network-vif-plugged-0d202452-add9-4ae7-b035-6554f287d049 for instance with vm_state building and task_state spawning. [ 2307.987277] env[62684]: DEBUG nova.compute.manager [req-8f780cdd-bb39-44c1-b677-53968e4a9024 req-6f435b9a-2ac3-47df-a4c1-851006b83ae5 service nova] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Received event network-changed-0d202452-add9-4ae7-b035-6554f287d049 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2307.987487] env[62684]: DEBUG nova.compute.manager [req-8f780cdd-bb39-44c1-b677-53968e4a9024 req-6f435b9a-2ac3-47df-a4c1-851006b83ae5 service nova] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Refreshing instance network info cache due to event network-changed-0d202452-add9-4ae7-b035-6554f287d049. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2307.987663] env[62684]: DEBUG oslo_concurrency.lockutils [req-8f780cdd-bb39-44c1-b677-53968e4a9024 req-6f435b9a-2ac3-47df-a4c1-851006b83ae5 service nova] Acquiring lock "refresh_cache-69d26980-f42d-4d35-8de3-a85d7a6f0a11" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2308.035758] env[62684]: DEBUG nova.compute.utils [None req-7433cc9a-7715-4079-9d6e-1fe75a41a317 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2308.243924] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2308.243924] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527a28fd-7ab7-297f-a354-c771aa9fda8f" [ 2308.243924] env[62684]: _type = "HttpNfcLease" [ 2308.243924] env[62684]: } is initializing. 
{{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2308.440912] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Releasing lock "refresh_cache-69d26980-f42d-4d35-8de3-a85d7a6f0a11" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2308.441486] env[62684]: DEBUG nova.compute.manager [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Instance network_info: |[{"id": "0d202452-add9-4ae7-b035-6554f287d049", "address": "fa:16:3e:3b:e0:ab", "network": {"id": "b24dd0c0-a394-4ca6-a79a-94535bc1df6f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2023102141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "607a0aa1049640d882d7dd490f5f98ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d202452-ad", "ovs_interfaceid": "0d202452-add9-4ae7-b035-6554f287d049", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2308.441960] env[62684]: DEBUG oslo_concurrency.lockutils [req-8f780cdd-bb39-44c1-b677-53968e4a9024 req-6f435b9a-2ac3-47df-a4c1-851006b83ae5 service nova] Acquired lock "refresh_cache-69d26980-f42d-4d35-8de3-a85d7a6f0a11" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2308.442303] env[62684]: DEBUG nova.network.neutron [req-8f780cdd-bb39-44c1-b677-53968e4a9024 req-6f435b9a-2ac3-47df-a4c1-851006b83ae5 service nova] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Refreshing network info cache for port 0d202452-add9-4ae7-b035-6554f287d049 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2308.445140] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:e0:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0d202452-add9-4ae7-b035-6554f287d049', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2308.453493] env[62684]: DEBUG oslo.service.loopingcall [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 
tempest-DeleteServersTestJSON-370305399-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2308.455231] env[62684]: DEBUG nova.network.neutron [None req-24d4765b-e1bc-48d6-9df7-b5a42fd7d9e0 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Updating instance_info_cache with network_info: [{"id": "0a96e2ce-2335-44e2-940d-26d3afbafa3a", "address": "fa:16:3e:6d:b8:02", "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-120070593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "263c101fcc5e493789b79dfd1ba97cc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a96e2ce-23", "ovs_interfaceid": "0a96e2ce-2335-44e2-940d-26d3afbafa3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2308.457191] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2308.457292] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5279ad47-5d1a-4cbe-a124-be33726b285a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2308.481643] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2308.481643] env[62684]: value = "task-2053752" [ 2308.481643] env[62684]: _type = "Task" [ 2308.481643] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2308.490879] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053752, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2308.540034] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7433cc9a-7715-4079-9d6e-1fe75a41a317 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "5b3668f3-219d-4304-bc9e-9b911762085d" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2308.745360] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2308.745360] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527a28fd-7ab7-297f-a354-c771aa9fda8f" [ 2308.745360] env[62684]: _type = "HttpNfcLease" [ 2308.745360] env[62684]: } is initializing. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2308.957457] env[62684]: DEBUG oslo_concurrency.lockutils [None req-24d4765b-e1bc-48d6-9df7-b5a42fd7d9e0 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Releasing lock "refresh_cache-57537508-06e7-43a4-95c5-c4399b8bf93f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2308.957774] env[62684]: DEBUG nova.objects.instance [None req-24d4765b-e1bc-48d6-9df7-b5a42fd7d9e0 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lazy-loading 'migration_context' on Instance uuid 57537508-06e7-43a4-95c5-c4399b8bf93f {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2308.993272] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053752, 'name': CreateVM_Task, 'duration_secs': 0.38152} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2308.993441] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2308.994151] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2308.994337] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2308.994662] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2308.994928] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19211ad4-54c9-4faa-a894-0bb49d013d5b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.000497] env[62684]: DEBUG oslo_vmware.api [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2309.000497] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b86fba-47e0-cef8-d067-84c2b7c95e40" [ 2309.000497] env[62684]: _type = "Task" [ 2309.000497] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2309.008425] env[62684]: DEBUG oslo_vmware.api [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b86fba-47e0-cef8-d067-84c2b7c95e40, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2309.203446] env[62684]: DEBUG nova.network.neutron [req-8f780cdd-bb39-44c1-b677-53968e4a9024 req-6f435b9a-2ac3-47df-a4c1-851006b83ae5 service nova] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Updated VIF entry in instance network info cache for port 0d202452-add9-4ae7-b035-6554f287d049. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2309.203446] env[62684]: DEBUG nova.network.neutron [req-8f780cdd-bb39-44c1-b677-53968e4a9024 req-6f435b9a-2ac3-47df-a4c1-851006b83ae5 service nova] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Updating instance_info_cache with network_info: [{"id": "0d202452-add9-4ae7-b035-6554f287d049", "address": "fa:16:3e:3b:e0:ab", "network": {"id": "b24dd0c0-a394-4ca6-a79a-94535bc1df6f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2023102141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "607a0aa1049640d882d7dd490f5f98ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d202452-ad", "ovs_interfaceid": "0d202452-add9-4ae7-b035-6554f287d049", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2309.245679] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2309.245679] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527a28fd-7ab7-297f-a354-c771aa9fda8f" [ 2309.245679] env[62684]: _type = "HttpNfcLease" [ 2309.245679] env[62684]: } is initializing. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2309.461206] env[62684]: DEBUG nova.objects.base [None req-24d4765b-e1bc-48d6-9df7-b5a42fd7d9e0 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Object Instance<57537508-06e7-43a4-95c5-c4399b8bf93f> lazy-loaded attributes: info_cache,migration_context {{(pid=62684) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2309.462247] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-640383ad-281a-4494-8be0-8f671d92d902 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.485036] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3264f82c-a891-4c3b-87bb-88e204f1158e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.491024] env[62684]: DEBUG oslo_vmware.api [None req-24d4765b-e1bc-48d6-9df7-b5a42fd7d9e0 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2309.491024] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52083ea4-6cee-26eb-6458-6a5d81315130" [ 2309.491024] env[62684]: _type = "Task" [ 2309.491024] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2309.500492] env[62684]: DEBUG oslo_vmware.api [None req-24d4765b-e1bc-48d6-9df7-b5a42fd7d9e0 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52083ea4-6cee-26eb-6458-6a5d81315130, 'name': SearchDatastore_Task, 'duration_secs': 0.007275} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2309.500810] env[62684]: DEBUG oslo_concurrency.lockutils [None req-24d4765b-e1bc-48d6-9df7-b5a42fd7d9e0 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2309.501078] env[62684]: DEBUG oslo_concurrency.lockutils [None req-24d4765b-e1bc-48d6-9df7-b5a42fd7d9e0 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2309.514030] env[62684]: DEBUG oslo_vmware.api [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b86fba-47e0-cef8-d067-84c2b7c95e40, 'name': SearchDatastore_Task, 'duration_secs': 0.020438} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2309.514030] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2309.514220] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2309.514448] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2309.514595] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2309.514802] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2309.515200] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17e375d0-0b81-4707-9ed1-b55e20424bd4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.526317] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2309.526536] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2309.527584] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f32eeba-82a6-42d9-8546-87ba12418f19 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.533637] env[62684]: DEBUG oslo_vmware.api [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2309.533637] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52cbf33a-1598-64af-91fd-0996b3fa1169" [ 2309.533637] env[62684]: _type = "Task" [ 2309.533637] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2309.545644] env[62684]: DEBUG oslo_vmware.api [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52cbf33a-1598-64af-91fd-0996b3fa1169, 'name': SearchDatastore_Task, 'duration_secs': 0.009387} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2309.546443] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4b364e4-f0be-4447-8646-4b6d6807ecee {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.552255] env[62684]: DEBUG oslo_vmware.api [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2309.552255] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5241fb2f-1c71-58da-cc49-cf06af529450" [ 2309.552255] env[62684]: _type = "Task" [ 2309.552255] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2309.560953] env[62684]: DEBUG oslo_vmware.api [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5241fb2f-1c71-58da-cc49-cf06af529450, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2309.600406] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7433cc9a-7715-4079-9d6e-1fe75a41a317 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "5b3668f3-219d-4304-bc9e-9b911762085d" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2309.600711] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7433cc9a-7715-4079-9d6e-1fe75a41a317 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "5b3668f3-219d-4304-bc9e-9b911762085d" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2309.600952] env[62684]: INFO nova.compute.manager [None req-7433cc9a-7715-4079-9d6e-1fe75a41a317 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Attaching volume a930b2fe-6aed-4e27-967f-d8cfdc8a6d1b to /dev/sdb [ 2309.631141] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6372ceb-b45a-4269-909d-ff06d33a1e71 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.638633] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a552a7e3-a700-4cdc-8a44-d810ff3832a6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.652244] env[62684]: DEBUG nova.virt.block_device [None req-7433cc9a-7715-4079-9d6e-1fe75a41a317 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Updating existing volume attachment record: 8d7db9c9-4cbf-4f32-8350-97db36c2af99 {{(pid=62684) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2309.706485] env[62684]: DEBUG oslo_concurrency.lockutils [req-8f780cdd-bb39-44c1-b677-53968e4a9024 req-6f435b9a-2ac3-47df-a4c1-851006b83ae5 service nova] Releasing lock "refresh_cache-69d26980-f42d-4d35-8de3-a85d7a6f0a11" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2309.745316] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2309.745316] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527a28fd-7ab7-297f-a354-c771aa9fda8f" [ 2309.745316] env[62684]: _type = "HttpNfcLease" [ 2309.745316] env[62684]: } is ready. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2309.745960] env[62684]: DEBUG oslo_vmware.rw_handles [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2309.745960] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]527a28fd-7ab7-297f-a354-c771aa9fda8f" [ 2309.745960] env[62684]: _type = "HttpNfcLease" [ 2309.745960] env[62684]: }. 
{{(pid=62684) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2309.746323] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19921dd2-679a-4ff5-8d10-23df5ba6ee24 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.752873] env[62684]: DEBUG oslo_vmware.rw_handles [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521f7174-0ed3-0282-e2d2-777a1943f0a2/disk-0.vmdk from lease info. {{(pid=62684) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2309.753064] env[62684]: DEBUG oslo_vmware.rw_handles [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Creating HTTP connection to write to file with size = 31666688 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521f7174-0ed3-0282-e2d2-777a1943f0a2/disk-0.vmdk. {{(pid=62684) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2309.815564] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f8698243-a0b7-4dff-a7a7-f647132291bd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.067275] env[62684]: DEBUG oslo_vmware.api [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5241fb2f-1c71-58da-cc49-cf06af529450, 'name': SearchDatastore_Task, 'duration_secs': 0.009009} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2310.069610] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2310.069894] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 69d26980-f42d-4d35-8de3-a85d7a6f0a11/69d26980-f42d-4d35-8de3-a85d7a6f0a11.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2310.070217] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f928db25-d8fb-4f4c-b79e-4b67702bdbd7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.082435] env[62684]: DEBUG oslo_vmware.api [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2310.082435] env[62684]: value = "task-2053756" [ 2310.082435] env[62684]: _type = "Task" [ 2310.082435] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2310.099188] env[62684]: DEBUG oslo_vmware.api [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053756, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2310.132136] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d35abac8-c273-4d90-a853-6e105aaa68c9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.144540] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef8a70fb-520c-4811-9126-ec25566c5380 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.185922] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4918da2-dea8-4218-8f2f-d28913e71572 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.197335] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48e44dab-2658-44c4-ad82-6ec918b3226c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.216469] env[62684]: DEBUG nova.compute.provider_tree [None req-24d4765b-e1bc-48d6-9df7-b5a42fd7d9e0 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2310.596767] env[62684]: DEBUG oslo_vmware.api [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053756, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.481423} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2310.597089] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 69d26980-f42d-4d35-8de3-a85d7a6f0a11/69d26980-f42d-4d35-8de3-a85d7a6f0a11.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2310.597323] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2310.597615] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-26f5d6e9-bf83-4120-acf6-8db78ac7531f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.605029] env[62684]: DEBUG oslo_vmware.api [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2310.605029] env[62684]: value = "task-2053757" [ 2310.605029] env[62684]: _type = "Task" [ 2310.605029] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2310.613400] env[62684]: DEBUG oslo_vmware.api [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053757, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2310.723120] env[62684]: DEBUG nova.scheduler.client.report [None req-24d4765b-e1bc-48d6-9df7-b5a42fd7d9e0 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2311.006710] env[62684]: DEBUG oslo_vmware.rw_handles [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Completed reading data from the image iterator. {{(pid=62684) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2311.006998] env[62684]: DEBUG oslo_vmware.rw_handles [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521f7174-0ed3-0282-e2d2-777a1943f0a2/disk-0.vmdk. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2311.009021] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b46094-4155-46b8-b842-ad260f31abc5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.016448] env[62684]: DEBUG oslo_vmware.rw_handles [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521f7174-0ed3-0282-e2d2-777a1943f0a2/disk-0.vmdk is in state: ready. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2311.016620] env[62684]: DEBUG oslo_vmware.rw_handles [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521f7174-0ed3-0282-e2d2-777a1943f0a2/disk-0.vmdk. 
{{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2311.016901] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-1d37393a-473b-4357-9fac-e58375c14c0c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.115824] env[62684]: DEBUG oslo_vmware.api [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053757, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076239} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2311.116203] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2311.116964] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3f89337-b5b3-47e1-bf58-3fd837c72388 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.139442] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] 69d26980-f42d-4d35-8de3-a85d7a6f0a11/69d26980-f42d-4d35-8de3-a85d7a6f0a11.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2311.139697] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b9182d28-710b-4182-860f-da745166a4ff {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.161380] env[62684]: DEBUG oslo_vmware.api [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2311.161380] env[62684]: value = "task-2053758" [ 2311.161380] env[62684]: _type = "Task" [ 2311.161380] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2311.170065] env[62684]: DEBUG oslo_vmware.api [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053758, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2311.209299] env[62684]: DEBUG oslo_vmware.rw_handles [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521f7174-0ed3-0282-e2d2-777a1943f0a2/disk-0.vmdk. 
{{(pid=62684) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2311.209547] env[62684]: INFO nova.virt.vmwareapi.images [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Downloaded image file data a08ef668-cfc2-427d-9c76-1c1bbb64d819 [ 2311.210482] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d4f3a6f-62b6-4d99-9832-8020cf76f064 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.227909] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-11f968ac-71da-4852-a5d0-f017e1bc371f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.255313] env[62684]: INFO nova.virt.vmwareapi.images [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] The imported VM was unregistered [ 2311.257659] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Caching image {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2311.257905] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Creating directory with path [datastore2] devstack-image-cache_base/a08ef668-cfc2-427d-9c76-1c1bbb64d819 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2311.258251] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6983e9cc-9c9d-4fd5-9efd-39b9b2cb1852 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.292862] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Created directory with path [datastore2] devstack-image-cache_base/a08ef668-cfc2-427d-9c76-1c1bbb64d819 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2311.293091] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_a9ae0d98-5401-4100-89c8-876860774d57/OSTACK_IMG_a9ae0d98-5401-4100-89c8-876860774d57.vmdk to [datastore2] devstack-image-cache_base/a08ef668-cfc2-427d-9c76-1c1bbb64d819/a08ef668-cfc2-427d-9c76-1c1bbb64d819.vmdk. 
{{(pid=62684) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2311.293362] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-14dc7657-b9a0-4556-a46b-1794d666d559 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.301169] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2311.301169] env[62684]: value = "task-2053760" [ 2311.301169] env[62684]: _type = "Task" [ 2311.301169] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2311.311547] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053760, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2311.672733] env[62684]: DEBUG oslo_vmware.api [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053758, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2311.738198] env[62684]: DEBUG oslo_concurrency.lockutils [None req-24d4765b-e1bc-48d6-9df7-b5a42fd7d9e0 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.237s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2311.813219] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053760, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2312.173433] env[62684]: DEBUG oslo_vmware.api [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053758, 'name': ReconfigVM_Task, 'duration_secs': 1.003651} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2312.173804] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Reconfigured VM instance instance-0000006f to attach disk [datastore2] 69d26980-f42d-4d35-8de3-a85d7a6f0a11/69d26980-f42d-4d35-8de3-a85d7a6f0a11.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2312.174253] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dc344eea-2ae1-4240-b28e-ac290dae4569 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.182789] env[62684]: DEBUG oslo_vmware.api [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2312.182789] env[62684]: value = "task-2053762" [ 2312.182789] env[62684]: _type = "Task" [ 2312.182789] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2312.192848] env[62684]: DEBUG oslo_vmware.api [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053762, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2312.307939] env[62684]: INFO nova.scheduler.client.report [None req-24d4765b-e1bc-48d6-9df7-b5a42fd7d9e0 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Deleted allocation for migration cc16ccfa-7dfe-43f1-b442-519a0a058755 [ 2312.317930] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053760, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2312.693691] env[62684]: DEBUG oslo_vmware.api [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053762, 'name': Rename_Task, 'duration_secs': 0.376116} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2312.694103] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2312.694367] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f6faf01a-ff2a-44dd-98bb-2c1540233a27 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.702557] env[62684]: DEBUG oslo_vmware.api [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2312.702557] env[62684]: value = "task-2053763" [ 2312.702557] env[62684]: _type = "Task" [ 2312.702557] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2312.712664] env[62684]: DEBUG oslo_vmware.api [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053763, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2312.730544] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "57537508-06e7-43a4-95c5-c4399b8bf93f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2312.817425] env[62684]: DEBUG oslo_concurrency.lockutils [None req-24d4765b-e1bc-48d6-9df7-b5a42fd7d9e0 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "57537508-06e7-43a4-95c5-c4399b8bf93f" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.175s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2312.819358] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053760, 'name': MoveVirtualDisk_Task} progress is 66%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2312.820111] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "57537508-06e7-43a4-95c5-c4399b8bf93f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.090s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2312.820551] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "57537508-06e7-43a4-95c5-c4399b8bf93f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2312.820963] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "57537508-06e7-43a4-95c5-c4399b8bf93f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2312.821326] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "57537508-06e7-43a4-95c5-c4399b8bf93f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2312.824227] env[62684]: INFO nova.compute.manager [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Terminating instance [ 2312.827016] env[62684]: DEBUG nova.compute.manager [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2312.827446] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2312.828862] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d9c9bb7-2c29-49c4-ab1b-f57bf24c5330 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.843400] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2312.843868] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2e107bd2-af2d-40ca-9144-674bf1d6332f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.853512] env[62684]: DEBUG oslo_vmware.api [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2312.853512] env[62684]: value = "task-2053764" [ 2312.853512] env[62684]: _type = "Task" [ 2312.853512] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2312.869715] env[62684]: DEBUG oslo_vmware.api [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053764, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2313.216516] env[62684]: DEBUG oslo_vmware.api [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053763, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2313.315709] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053760, 'name': MoveVirtualDisk_Task} progress is 85%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2313.364607] env[62684]: DEBUG oslo_vmware.api [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053764, 'name': PowerOffVM_Task, 'duration_secs': 0.192869} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2313.364909] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2313.365132] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2313.365429] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-51b9bee7-e4a3-487c-a2d8-d0fc0f999554 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.652451] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2313.652788] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2313.653067] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Deleting the datastore file [datastore1] 57537508-06e7-43a4-95c5-c4399b8bf93f {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2313.653394] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d757503e-34d4-4b8c-8790-9ff42095f7cd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.661898] env[62684]: DEBUG oslo_vmware.api [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2313.661898] env[62684]: value = "task-2053766" [ 2313.661898] env[62684]: _type = "Task" [ 2313.661898] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2313.670146] env[62684]: DEBUG oslo_vmware.api [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053766, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2313.714293] env[62684]: DEBUG oslo_vmware.api [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053763, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2313.815168] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053760, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.384354} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2313.815476] env[62684]: INFO nova.virt.vmwareapi.ds_util [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_a9ae0d98-5401-4100-89c8-876860774d57/OSTACK_IMG_a9ae0d98-5401-4100-89c8-876860774d57.vmdk to [datastore2] devstack-image-cache_base/a08ef668-cfc2-427d-9c76-1c1bbb64d819/a08ef668-cfc2-427d-9c76-1c1bbb64d819.vmdk. [ 2313.815677] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Cleaning up location [datastore2] OSTACK_IMG_a9ae0d98-5401-4100-89c8-876860774d57 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2313.815844] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_a9ae0d98-5401-4100-89c8-876860774d57 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2313.816138] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a79dfeb1-03dc-4680-ba96-1437c396a726 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.825973] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2313.825973] env[62684]: value = "task-2053767" [ 2313.825973] env[62684]: _type = "Task" [ 2313.825973] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2313.835690] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053767, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2314.172548] env[62684]: DEBUG oslo_vmware.api [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053766, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166358} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2314.172759] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2314.172987] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2314.173201] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2314.173387] env[62684]: INFO nova.compute.manager [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Took 1.35 seconds to destroy the instance on the hypervisor. [ 2314.173636] env[62684]: DEBUG oslo.service.loopingcall [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2314.173839] env[62684]: DEBUG nova.compute.manager [-] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2314.173942] env[62684]: DEBUG nova.network.neutron [-] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2314.194192] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-7433cc9a-7715-4079-9d6e-1fe75a41a317 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Volume attach. 
Driver type: vmdk {{(pid=62684) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2314.194401] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-7433cc9a-7715-4079-9d6e-1fe75a41a317 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421415', 'volume_id': 'a930b2fe-6aed-4e27-967f-d8cfdc8a6d1b', 'name': 'volume-a930b2fe-6aed-4e27-967f-d8cfdc8a6d1b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5b3668f3-219d-4304-bc9e-9b911762085d', 'attached_at': '', 'detached_at': '', 'volume_id': 'a930b2fe-6aed-4e27-967f-d8cfdc8a6d1b', 'serial': 'a930b2fe-6aed-4e27-967f-d8cfdc8a6d1b'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2314.195267] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e238181a-001a-4fdc-8882-dbe87cd82167 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.214814] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-417c193a-9138-4603-b7b5-3f42ca0e3e67 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.222202] env[62684]: DEBUG oslo_vmware.api [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053763, 'name': PowerOnVM_Task, 'duration_secs': 1.137449} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2314.236146] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2314.236461] env[62684]: INFO nova.compute.manager [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Took 8.34 seconds to spawn the instance on the hypervisor. 
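The spawn sequence recorded above for instance 69d26980-f42d-4d35-8de3-a85d7a6f0a11 (SearchDatastore_Task, MakeDirectory, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) is driven through oslo.vmware's session layer: each vSphere call returns a Task managed object, and the session polls it until it reports success or error, which is what the repeated "Waiting for the task", "progress is N%" and "completed successfully" entries from wait_for_task/_poll_task reflect. A minimal sketch of that pattern is below; it is not the Nova driver code itself, and the vCenter host, credentials and datastore paths are hypothetical placeholders.

# Minimal sketch of the oslo.vmware task-polling pattern seen in the log.
# Host, credentials and datastore paths are hypothetical; this is not the
# Nova vmwareapi driver code itself.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vc.example.test',       # vCenter host (hypothetical)
    'administrator',         # user name (hypothetical)
    'secret',                # password (hypothetical)
    api_retry_count=10,
    task_poll_interval=0.5)

# The VirtualDiskManager managed object lives in the service content.
disk_mgr = session.vim.service_content.virtualDiskManager

# Kick off an asynchronous vSphere operation; the call returns a Task
# managed object reference immediately instead of blocking.
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', disk_mgr,
    sourceName='[datastore2] devstack-image-cache_base/IMAGE/IMAGE.vmdk',
    destName='[datastore2] INSTANCE/INSTANCE.vmdk')

# wait_for_task polls the task (the "progress is N%" lines above) and
# raises if the task finishes in an error state; on success it returns
# the final task info.
task_info = session.wait_for_task(task)
print(task_info.state)       # 'success' once the copy has completed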
[ 2314.236725] env[62684]: DEBUG nova.compute.manager [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2314.244274] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-7433cc9a-7715-4079-9d6e-1fe75a41a317 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] volume-a930b2fe-6aed-4e27-967f-d8cfdc8a6d1b/volume-a930b2fe-6aed-4e27-967f-d8cfdc8a6d1b.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2314.247049] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f213cd5-b688-4842-9a63-a4edef66a2f0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.249493] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-379cdd61-222b-4565-aae0-872ae0671898 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.269906] env[62684]: DEBUG oslo_vmware.api [None req-7433cc9a-7715-4079-9d6e-1fe75a41a317 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2314.269906] env[62684]: value = "task-2053768" [ 2314.269906] env[62684]: _type = "Task" [ 2314.269906] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2314.278391] env[62684]: DEBUG oslo_vmware.api [None req-7433cc9a-7715-4079-9d6e-1fe75a41a317 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053768, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2314.335874] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053767, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.038243} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2314.336233] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2314.336438] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a08ef668-cfc2-427d-9c76-1c1bbb64d819/a08ef668-cfc2-427d-9c76-1c1bbb64d819.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2314.336692] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a08ef668-cfc2-427d-9c76-1c1bbb64d819/a08ef668-cfc2-427d-9c76-1c1bbb64d819.vmdk to [datastore2] 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1/9f1e9ae9-c082-4fbe-bd21-6e14e40962c1.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2314.337048] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f4ba87eb-acce-4227-ba91-5abae5661140 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.344411] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2314.344411] env[62684]: value = "task-2053769" [ 2314.344411] env[62684]: _type = "Task" [ 2314.344411] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2314.353285] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053769, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2314.695125] env[62684]: DEBUG nova.compute.manager [req-a0b00064-1a2a-4917-a8a3-bb9a55ae8c6a req-fd34affe-294e-48d7-ba37-cec3dccf2267 service nova] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Received event network-vif-deleted-0a96e2ce-2335-44e2-940d-26d3afbafa3a {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2314.695352] env[62684]: INFO nova.compute.manager [req-a0b00064-1a2a-4917-a8a3-bb9a55ae8c6a req-fd34affe-294e-48d7-ba37-cec3dccf2267 service nova] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Neutron deleted interface 0a96e2ce-2335-44e2-940d-26d3afbafa3a; detaching it from the instance and deleting it from the info cache [ 2314.695553] env[62684]: DEBUG nova.network.neutron [req-a0b00064-1a2a-4917-a8a3-bb9a55ae8c6a req-fd34affe-294e-48d7-ba37-cec3dccf2267 service nova] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2314.783010] env[62684]: INFO nova.compute.manager [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Took 15.89 seconds to build instance. [ 2314.788507] env[62684]: DEBUG oslo_vmware.api [None req-7433cc9a-7715-4079-9d6e-1fe75a41a317 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053768, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2314.855360] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053769, 'name': CopyVirtualDisk_Task} progress is 12%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2315.174852] env[62684]: DEBUG nova.network.neutron [-] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2315.198493] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3cfea78e-7b1f-469c-aa7f-1ae0ff15675a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2315.210047] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2d2fd2d-43ce-4bce-a06d-93f89e15a7a6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2315.242014] env[62684]: DEBUG nova.compute.manager [req-a0b00064-1a2a-4917-a8a3-bb9a55ae8c6a req-fd34affe-294e-48d7-ba37-cec3dccf2267 service nova] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Detach interface failed, port_id=0a96e2ce-2335-44e2-940d-26d3afbafa3a, reason: Instance 57537508-06e7-43a4-95c5-c4399b8bf93f could not be found. 
{{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2315.289300] env[62684]: DEBUG oslo_vmware.api [None req-7433cc9a-7715-4079-9d6e-1fe75a41a317 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053768, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2315.289852] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33e1586a-23c8-4f1e-8efa-c40c4b2fb6a6 tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "69d26980-f42d-4d35-8de3-a85d7a6f0a11" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.420s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2315.356036] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053769, 'name': CopyVirtualDisk_Task} progress is 32%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2315.678067] env[62684]: INFO nova.compute.manager [-] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Took 1.50 seconds to deallocate network for instance. [ 2315.788586] env[62684]: DEBUG oslo_vmware.api [None req-7433cc9a-7715-4079-9d6e-1fe75a41a317 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053768, 'name': ReconfigVM_Task, 'duration_secs': 1.185875} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2315.788873] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-7433cc9a-7715-4079-9d6e-1fe75a41a317 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Reconfigured VM instance instance-0000006c to attach disk [datastore2] volume-a930b2fe-6aed-4e27-967f-d8cfdc8a6d1b/volume-a930b2fe-6aed-4e27-967f-d8cfdc8a6d1b.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2315.794153] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8dfc5182-9b0f-48d9-a69b-468ca3d9641e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2315.811080] env[62684]: DEBUG oslo_vmware.api [None req-7433cc9a-7715-4079-9d6e-1fe75a41a317 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2315.811080] env[62684]: value = "task-2053770" [ 2315.811080] env[62684]: _type = "Task" [ 2315.811080] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2315.821022] env[62684]: DEBUG oslo_vmware.api [None req-7433cc9a-7715-4079-9d6e-1fe75a41a317 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053770, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2315.856789] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053769, 'name': CopyVirtualDisk_Task} progress is 54%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2316.186627] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2316.187016] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2316.187252] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2316.205580] env[62684]: INFO nova.scheduler.client.report [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Deleted allocations for instance 57537508-06e7-43a4-95c5-c4399b8bf93f [ 2316.321562] env[62684]: DEBUG oslo_vmware.api [None req-7433cc9a-7715-4079-9d6e-1fe75a41a317 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053770, 'name': ReconfigVM_Task, 'duration_secs': 0.214335} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2316.321960] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-7433cc9a-7715-4079-9d6e-1fe75a41a317 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421415', 'volume_id': 'a930b2fe-6aed-4e27-967f-d8cfdc8a6d1b', 'name': 'volume-a930b2fe-6aed-4e27-967f-d8cfdc8a6d1b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5b3668f3-219d-4304-bc9e-9b911762085d', 'attached_at': '', 'detached_at': '', 'volume_id': 'a930b2fe-6aed-4e27-967f-d8cfdc8a6d1b', 'serial': 'a930b2fe-6aed-4e27-967f-d8cfdc8a6d1b'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2316.361020] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053769, 'name': CopyVirtualDisk_Task} progress is 74%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2316.532501] env[62684]: DEBUG nova.compute.manager [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Stashing vm_state: active {{(pid=62684) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 2316.715457] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1e0e5e96-a535-4693-9463-a16fcde8cf2f tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "57537508-06e7-43a4-95c5-c4399b8bf93f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.895s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2316.859256] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053769, 'name': CopyVirtualDisk_Task} progress is 97%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2317.054516] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2317.054789] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2317.301072] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2317.359256] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053769, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.599756} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2317.359658] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a08ef668-cfc2-427d-9c76-1c1bbb64d819/a08ef668-cfc2-427d-9c76-1c1bbb64d819.vmdk to [datastore2] 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1/9f1e9ae9-c082-4fbe-bd21-6e14e40962c1.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2317.360365] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd79f8c5-bd3c-4e9d-915b-d5c6316d7b1d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.363852] env[62684]: DEBUG nova.objects.instance [None req-7433cc9a-7715-4079-9d6e-1fe75a41a317 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lazy-loading 'flavor' on Instance uuid 5b3668f3-219d-4304-bc9e-9b911762085d {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2317.384197] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1/9f1e9ae9-c082-4fbe-bd21-6e14e40962c1.vmdk or device None with type streamOptimized {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2317.385435] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7ffc9a6-98e8-466d-94c0-f776e1ff2883 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.407628] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2317.407628] env[62684]: value = "task-2053771" [ 2317.407628] env[62684]: _type = "Task" [ 2317.407628] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2317.417021] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053771, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2317.559539] env[62684]: INFO nova.compute.claims [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2317.735905] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "b88d9418-7e90-473e-bd9a-18bc398faad0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2317.736158] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "b88d9418-7e90-473e-bd9a-18bc398faad0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2317.803653] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2317.880530] env[62684]: DEBUG oslo_concurrency.lockutils [None req-7433cc9a-7715-4079-9d6e-1fe75a41a317 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "5b3668f3-219d-4304-bc9e-9b911762085d" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.279s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2317.918168] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053771, 'name': ReconfigVM_Task, 'duration_secs': 0.398503} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2317.918466] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Reconfigured VM instance instance-00000068 to attach disk [datastore2] 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1/9f1e9ae9-c082-4fbe-bd21-6e14e40962c1.vmdk or device None with type streamOptimized {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2317.919152] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-88f8f674-ecb3-4882-b4c9-eca529a138f2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.926217] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2317.926217] env[62684]: value = "task-2053772" [ 2317.926217] env[62684]: _type = "Task" [ 2317.926217] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2317.935864] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053772, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2318.065443] env[62684]: INFO nova.compute.resource_tracker [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Updating resource usage from migration 48d12785-9a2b-4109-9c3d-ce99edbcc23c [ 2318.161775] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0553d1e-db0b-4f5b-a301-d784af52395e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.171549] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ded7c2ad-aed2-4681-b225-933cd6031057 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.205157] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-648beb99-adaf-4850-ad78-5f27a4807197 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.215126] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-942620c4-9001-4b01-a832-e21c4faa888b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.231240] env[62684]: DEBUG nova.compute.provider_tree [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2318.239535] 
env[62684]: DEBUG nova.compute.manager [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2318.436944] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053772, 'name': Rename_Task, 'duration_secs': 0.170687} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2318.437276] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2318.437535] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-631eb59e-2ae9-4add-9cbf-c802fbd57946 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.444988] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2318.444988] env[62684]: value = "task-2053773" [ 2318.444988] env[62684]: _type = "Task" [ 2318.444988] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2318.455659] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053773, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2318.734807] env[62684]: DEBUG nova.scheduler.client.report [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2318.757442] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2318.955934] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053773, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2319.145480] env[62684]: DEBUG oslo_concurrency.lockutils [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "f7b61e23-fe0d-41fb-9100-d07cd8cb2d04" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2319.145740] env[62684]: DEBUG oslo_concurrency.lockutils [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "f7b61e23-fe0d-41fb-9100-d07cd8cb2d04" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2319.239741] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.185s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2319.239964] env[62684]: INFO nova.compute.manager [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Migrating [ 2319.246540] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.443s {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2319.246729] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2319.246887] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2319.247214] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.490s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2319.248678] env[62684]: INFO nova.compute.claims [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2319.254069] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed661af8-e686-40aa-927a-abc5e74b0636 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.267253] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d3e9d5-9960-4e19-be0a-cd4572c0c0fe {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.285507] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-238f5ac0-abef-4085-8656-bb617e7a5d47 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.293768] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ed638d6-bee5-4e25-8369-e1c9dc432265 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.323302] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180209MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2319.323446] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2319.455742] env[62684]: DEBUG oslo_vmware.api [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053773, 'name': PowerOnVM_Task, 
'duration_secs': 0.709594} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2319.456104] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2319.557253] env[62684]: DEBUG nova.compute.manager [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2319.558178] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa4cd32-38aa-4c9c-834f-f82873e6610e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.648655] env[62684]: DEBUG nova.compute.manager [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2319.763381] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "refresh_cache-69d26980-f42d-4d35-8de3-a85d7a6f0a11" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2319.763541] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquired lock "refresh_cache-69d26980-f42d-4d35-8de3-a85d7a6f0a11" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2319.763719] env[62684]: DEBUG nova.network.neutron [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2320.076863] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e6b06698-6751-44e4-a056-012254f17c5c tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lock "9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 25.483s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2320.172490] env[62684]: DEBUG oslo_concurrency.lockutils [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2320.379434] env[62684]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2cd78ff-f231-4ec6-96fb-0e3709c56646 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2320.387501] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90a6e9d9-b6b9-48fc-ae68-ea1bf053fdde {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2320.419884] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-180bb274-38db-436d-a725-e0162a544a86 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2320.428341] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d660f9d-9d96-4ce4-bdc7-54862b9f040c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2320.442653] env[62684]: DEBUG nova.compute.provider_tree [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2320.573651] env[62684]: DEBUG nova.network.neutron [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Updating instance_info_cache with network_info: [{"id": "0d202452-add9-4ae7-b035-6554f287d049", "address": "fa:16:3e:3b:e0:ab", "network": {"id": "b24dd0c0-a394-4ca6-a79a-94535bc1df6f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2023102141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "607a0aa1049640d882d7dd490f5f98ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d202452-ad", "ovs_interfaceid": "0d202452-add9-4ae7-b035-6554f287d049", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2320.947918] env[62684]: DEBUG nova.scheduler.client.report [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2321.076248] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Releasing lock "refresh_cache-69d26980-f42d-4d35-8de3-a85d7a6f0a11" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2321.305842] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45ce1e43-e91c-4478-877d-6f4aab9c59ca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.313580] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b1ca272d-204b-4e6a-ad31-5ec2e4c46c80 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Suspending the VM {{(pid=62684) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 2321.313820] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-ef3a16fb-e84e-41c4-96c7-cedf6d0a6a6b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.320636] env[62684]: DEBUG oslo_vmware.api [None req-b1ca272d-204b-4e6a-ad31-5ec2e4c46c80 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2321.320636] env[62684]: value = "task-2053774" [ 2321.320636] env[62684]: _type = "Task" [ 2321.320636] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2321.329380] env[62684]: DEBUG oslo_vmware.api [None req-b1ca272d-204b-4e6a-ad31-5ec2e4c46c80 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053774, 'name': SuspendVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2321.453146] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.206s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2321.453732] env[62684]: DEBUG nova.compute.manager [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2321.456568] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.133s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2321.832196] env[62684]: DEBUG oslo_vmware.api [None req-b1ca272d-204b-4e6a-ad31-5ec2e4c46c80 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053774, 'name': SuspendVM_Task} progress is 70%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2321.960574] env[62684]: DEBUG nova.compute.utils [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2321.965506] env[62684]: DEBUG nova.compute.manager [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2321.965641] env[62684]: DEBUG nova.network.neutron [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2322.030367] env[62684]: DEBUG nova.policy [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e3a532747bda4c7e8aa2892b424a47ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '263c101fcc5e493789b79dfd1ba97cc0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2322.333709] env[62684]: DEBUG oslo_vmware.api [None req-b1ca272d-204b-4e6a-ad31-5ec2e4c46c80 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053774, 'name': SuspendVM_Task, 'duration_secs': 0.571388} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2322.334306] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b1ca272d-204b-4e6a-ad31-5ec2e4c46c80 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Suspended the VM {{(pid=62684) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 2322.334668] env[62684]: DEBUG nova.compute.manager [None req-b1ca272d-204b-4e6a-ad31-5ec2e4c46c80 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2322.335987] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b04f3afe-0d57-4acf-8a80-bff9670917a4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.376921] env[62684]: DEBUG nova.network.neutron [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Successfully created port: 7e4b9e76-bf05-4ee7-b25c-922484094be0 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2322.466177] env[62684]: DEBUG nova.compute.manager [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2322.469344] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Applying migration context for instance 69d26980-f42d-4d35-8de3-a85d7a6f0a11 as it has an incoming, in-progress migration 48d12785-9a2b-4109-9c3d-ce99edbcc23c. Migration status is migrating {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 2322.470290] env[62684]: INFO nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Updating resource usage from migration 48d12785-9a2b-4109-9c3d-ce99edbcc23c [ 2322.491825] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 0156d807-1ab4-482f-91d1-172bf32bf23c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2322.491978] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 587edf89-2ea0-4b89-8830-fa766b798398 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2322.492125] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 5b3668f3-219d-4304-bc9e-9b911762085d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2322.492254] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2322.492374] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Migration 48d12785-9a2b-4109-9c3d-ce99edbcc23c is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 2322.492488] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 69d26980-f42d-4d35-8de3-a85d7a6f0a11 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2322.492600] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance b88d9418-7e90-473e-bd9a-18bc398faad0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2322.591331] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f49401-1cfe-4228-9609-ab5be1a0a9c0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.610073] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Updating instance '69d26980-f42d-4d35-8de3-a85d7a6f0a11' progress to 0 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2322.995364] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance f7b61e23-fe0d-41fb-9100-d07cd8cb2d04 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2322.995682] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2322.995786] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1920MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2323.091801] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6c71bab-5781-4038-aa2d-700bb927d44f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2323.099921] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43185b2a-250d-494f-8cf9-1850b4a46439 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2323.130032] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2323.130424] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-923afeab-dd0a-43ad-8658-fc70afd1d6df {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2323.132547] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc0dab2-30cf-4de9-9798-b7a7cb022b64 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2323.139752] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a122c22-a21a-41c8-bc21-a99ec6c2141b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2323.144846] env[62684]: DEBUG oslo_vmware.api [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2323.144846] env[62684]: value = "task-2053775" [ 2323.144846] env[62684]: _type = "Task" [ 2323.144846] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2323.155723] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2323.162740] env[62684]: DEBUG oslo_vmware.api [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053775, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2323.480049] env[62684]: DEBUG nova.compute.manager [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2323.502981] env[62684]: DEBUG nova.virt.hardware [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2323.503287] env[62684]: DEBUG nova.virt.hardware [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2323.503453] env[62684]: DEBUG nova.virt.hardware [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2323.503686] env[62684]: DEBUG nova.virt.hardware [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2323.503844] env[62684]: DEBUG nova.virt.hardware [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2323.504013] env[62684]: DEBUG nova.virt.hardware [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2323.504241] env[62684]: DEBUG nova.virt.hardware [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 
2323.504406] env[62684]: DEBUG nova.virt.hardware [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2323.504577] env[62684]: DEBUG nova.virt.hardware [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2323.504750] env[62684]: DEBUG nova.virt.hardware [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2323.504926] env[62684]: DEBUG nova.virt.hardware [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2323.505802] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3a2fd60-947f-4b21-bc0f-f54b88b7f1e1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2323.514974] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-786b039b-2b1d-4224-bd5f-ee9d41fef074 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2323.657201] env[62684]: DEBUG oslo_vmware.api [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053775, 'name': PowerOffVM_Task, 'duration_secs': 0.227756} completed successfully. 
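Annotation: the "Build topologies for 1 vcpu(s) 1:1:1" / "Got 1 possible topologies" entries above show why a 1-vCPU flavor always ends up with VirtCPUTopology(cores=1,sockets=1,threads=1). This is a minimal re-derivation of that enumeration, not the nova.virt.hardware code itself: any sockets:cores:threads combination must multiply out to the requested vCPU count while staying under the (here effectively unlimited) maxima.

```python
# Minimal sketch: enumerate sockets:cores:threads combinations whose product
# equals the requested vCPU count. For vcpus=1 the only result is (1, 1, 1),
# matching the single topology reported in the trace.
from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for sockets, cores, threads in product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))  # [(1, 1, 1)]
```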
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2323.657535] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2323.657747] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Updating instance '69d26980-f42d-4d35-8de3-a85d7a6f0a11' progress to 17 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2323.662044] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2323.736839] env[62684]: INFO nova.compute.manager [None req-1ec370f0-0ca8-4178-819a-6773e4446117 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Resuming [ 2323.737507] env[62684]: DEBUG nova.objects.instance [None req-1ec370f0-0ca8-4178-819a-6773e4446117 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lazy-loading 'flavor' on Instance uuid 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2324.167062] env[62684]: DEBUG nova.virt.hardware [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2324.167413] env[62684]: DEBUG nova.virt.hardware [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2324.167461] env[62684]: DEBUG nova.virt.hardware [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 
tempest-DeleteServersTestJSON-370305399-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2324.167656] env[62684]: DEBUG nova.virt.hardware [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2324.167810] env[62684]: DEBUG nova.virt.hardware [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2324.167965] env[62684]: DEBUG nova.virt.hardware [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2324.168360] env[62684]: DEBUG nova.virt.hardware [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2324.168614] env[62684]: DEBUG nova.virt.hardware [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2324.168827] env[62684]: DEBUG nova.virt.hardware [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2324.169017] env[62684]: DEBUG nova.virt.hardware [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2324.169210] env[62684]: DEBUG nova.virt.hardware [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2324.174515] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2324.174697] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.718s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2324.174927] 
env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-822b98f0-c130-483e-95b9-eb05072b79e6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.185246] env[62684]: DEBUG oslo_concurrency.lockutils [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.013s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2324.186723] env[62684]: INFO nova.compute.claims [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2324.196688] env[62684]: DEBUG oslo_vmware.api [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2324.196688] env[62684]: value = "task-2053776" [ 2324.196688] env[62684]: _type = "Task" [ 2324.196688] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2324.204631] env[62684]: DEBUG oslo_vmware.api [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053776, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2324.706249] env[62684]: DEBUG oslo_vmware.api [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053776, 'name': ReconfigVM_Task} progress is 99%. 
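Annotation: the inventory dump reported a few entries above (VCPU total 48 with allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400) determines how much the scheduler can place on this node. Assuming Placement's effective capacity is (total - reserved) * allocation_ratio, the worked example below shows the headroom those numbers imply.

```python
# Worked example over the inventory dump in the trace.
# Assumption: effective capacity = (total - reserved) * allocation_ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```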
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2324.745275] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1ec370f0-0ca8-4178-819a-6773e4446117 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquiring lock "refresh_cache-9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2324.745476] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1ec370f0-0ca8-4178-819a-6773e4446117 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquired lock "refresh_cache-9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2324.745711] env[62684]: DEBUG nova.network.neutron [None req-1ec370f0-0ca8-4178-819a-6773e4446117 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2325.206644] env[62684]: DEBUG oslo_vmware.api [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053776, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2325.292764] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f695a5a6-c7af-413e-91c9-ac972dbd3e0f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.300099] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-838e0ebe-26a4-452b-9e55-ebe68f595ba3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.332632] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b6afebe-f443-4d7d-a951-a92f844f6000 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.340046] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d3822d-a926-4076-aec1-6a862da50e80 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.353293] env[62684]: DEBUG nova.compute.provider_tree [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2325.467679] env[62684]: DEBUG nova.network.neutron [None req-1ec370f0-0ca8-4178-819a-6773e4446117 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Updating instance_info_cache with network_info: [{"id": "61adf5cc-1692-4079-b909-b15313ce9680", "address": "fa:16:3e:17:9f:65", "network": {"id": "4142ba34-c2e0-4a22-a8dd-be06ba98c6e5", "bridge": "br-int", "label": 
"tempest-ServersNegativeTestJSON-1627792019-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0e0f0e1dc834134913bd742fa99b52f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61adf5cc-16", "ovs_interfaceid": "61adf5cc-1692-4079-b909-b15313ce9680", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2325.708047] env[62684]: DEBUG oslo_vmware.api [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053776, 'name': ReconfigVM_Task, 'duration_secs': 1.116854} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2325.708047] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Updating instance '69d26980-f42d-4d35-8de3-a85d7a6f0a11' progress to 33 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2325.858645] env[62684]: DEBUG nova.scheduler.client.report [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2325.970170] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1ec370f0-0ca8-4178-819a-6773e4446117 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Releasing lock "refresh_cache-9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2325.971219] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05fd5690-0686-4472-a89a-e6e7f1195109 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.978328] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec370f0-0ca8-4178-819a-6773e4446117 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 
9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Resuming the VM {{(pid=62684) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 2325.978568] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a3b696e2-eecd-471d-b9e9-fbbbd08bc6ef {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.984253] env[62684]: DEBUG oslo_vmware.api [None req-1ec370f0-0ca8-4178-819a-6773e4446117 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2325.984253] env[62684]: value = "task-2053777" [ 2325.984253] env[62684]: _type = "Task" [ 2325.984253] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2325.991340] env[62684]: DEBUG oslo_vmware.api [None req-1ec370f0-0ca8-4178-819a-6773e4446117 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053777, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2326.214970] env[62684]: DEBUG nova.virt.hardware [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2326.215375] env[62684]: DEBUG nova.virt.hardware [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2326.215414] env[62684]: DEBUG nova.virt.hardware [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2326.215613] env[62684]: DEBUG nova.virt.hardware [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2326.215765] env[62684]: DEBUG nova.virt.hardware [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2326.215918] env[62684]: DEBUG nova.virt.hardware [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Chose sockets=0, 
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2326.216217] env[62684]: DEBUG nova.virt.hardware [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2326.216297] env[62684]: DEBUG nova.virt.hardware [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2326.216472] env[62684]: DEBUG nova.virt.hardware [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2326.216656] env[62684]: DEBUG nova.virt.hardware [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2326.216855] env[62684]: DEBUG nova.virt.hardware [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2326.222191] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Reconfiguring VM instance instance-0000006f to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2326.222500] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fedbda9e-302d-49b1-b4ae-5e0ec2202dc4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2326.241469] env[62684]: DEBUG oslo_vmware.api [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2326.241469] env[62684]: value = "task-2053778" [ 2326.241469] env[62684]: _type = "Task" [ 2326.241469] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2326.249620] env[62684]: DEBUG oslo_vmware.api [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053778, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2326.364153] env[62684]: DEBUG oslo_concurrency.lockutils [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.179s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2326.365100] env[62684]: DEBUG nova.compute.manager [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2326.499219] env[62684]: DEBUG oslo_vmware.api [None req-1ec370f0-0ca8-4178-819a-6773e4446117 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053777, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2326.752828] env[62684]: DEBUG oslo_vmware.api [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053778, 'name': ReconfigVM_Task, 'duration_secs': 0.284554} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2326.753197] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Reconfigured VM instance instance-0000006f to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2326.754111] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a02c2ef8-88fe-49b7-ae1f-c183ced9e34e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2326.777947] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] 69d26980-f42d-4d35-8de3-a85d7a6f0a11/69d26980-f42d-4d35-8de3-a85d7a6f0a11.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2326.778280] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b38641c3-46b8-40ee-a33a-7a397f1436eb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2326.797216] env[62684]: DEBUG oslo_vmware.api [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2326.797216] env[62684]: value = "task-2053779" [ 2326.797216] env[62684]: _type = "Task" [ 2326.797216] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2326.805559] env[62684]: DEBUG oslo_vmware.api [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053779, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2326.870271] env[62684]: DEBUG nova.compute.utils [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2326.871875] env[62684]: DEBUG nova.compute.manager [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2326.872065] env[62684]: DEBUG nova.network.neutron [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2326.877221] env[62684]: DEBUG nova.compute.manager [req-9343e41c-2ed2-4859-a3c3-52c49d2e8eb4 req-ab6ff478-fe16-447a-bdcb-06c94bb57ade service nova] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Received event network-vif-plugged-7e4b9e76-bf05-4ee7-b25c-922484094be0 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2326.877437] env[62684]: DEBUG oslo_concurrency.lockutils [req-9343e41c-2ed2-4859-a3c3-52c49d2e8eb4 req-ab6ff478-fe16-447a-bdcb-06c94bb57ade service nova] Acquiring lock "b88d9418-7e90-473e-bd9a-18bc398faad0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2326.877674] env[62684]: DEBUG oslo_concurrency.lockutils [req-9343e41c-2ed2-4859-a3c3-52c49d2e8eb4 req-ab6ff478-fe16-447a-bdcb-06c94bb57ade service nova] Lock "b88d9418-7e90-473e-bd9a-18bc398faad0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2326.877851] env[62684]: DEBUG oslo_concurrency.lockutils [req-9343e41c-2ed2-4859-a3c3-52c49d2e8eb4 req-ab6ff478-fe16-447a-bdcb-06c94bb57ade service nova] Lock "b88d9418-7e90-473e-bd9a-18bc398faad0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2326.878032] env[62684]: DEBUG nova.compute.manager [req-9343e41c-2ed2-4859-a3c3-52c49d2e8eb4 req-ab6ff478-fe16-447a-bdcb-06c94bb57ade service nova] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] No waiting events found dispatching network-vif-plugged-7e4b9e76-bf05-4ee7-b25c-922484094be0 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2326.878209] env[62684]: WARNING nova.compute.manager 
[req-9343e41c-2ed2-4859-a3c3-52c49d2e8eb4 req-ab6ff478-fe16-447a-bdcb-06c94bb57ade service nova] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Received unexpected event network-vif-plugged-7e4b9e76-bf05-4ee7-b25c-922484094be0 for instance with vm_state building and task_state spawning. [ 2326.947401] env[62684]: DEBUG nova.policy [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '58ea1db87d2b44408282a8b82d799443', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '947e7359aaba456fa1763f4dc8e9d359', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2326.995576] env[62684]: DEBUG oslo_vmware.api [None req-1ec370f0-0ca8-4178-819a-6773e4446117 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053777, 'name': PowerOnVM_Task, 'duration_secs': 0.540636} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2326.995910] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec370f0-0ca8-4178-819a-6773e4446117 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Resumed the VM {{(pid=62684) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 2326.996143] env[62684]: DEBUG nova.compute.manager [None req-1ec370f0-0ca8-4178-819a-6773e4446117 tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2326.997019] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29722387-c5e0-4a7e-9315-9da01043050a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.110020] env[62684]: DEBUG nova.network.neutron [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Successfully updated port: 7e4b9e76-bf05-4ee7-b25c-922484094be0 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2327.190575] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2327.191225] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2327.191397] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) 
_heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2327.307393] env[62684]: DEBUG oslo_vmware.api [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053779, 'name': ReconfigVM_Task, 'duration_secs': 0.438953} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2327.307781] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Reconfigured VM instance instance-0000006f to attach disk [datastore2] 69d26980-f42d-4d35-8de3-a85d7a6f0a11/69d26980-f42d-4d35-8de3-a85d7a6f0a11.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2327.307872] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Updating instance '69d26980-f42d-4d35-8de3-a85d7a6f0a11' progress to 50 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2327.313045] env[62684]: DEBUG nova.network.neutron [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Successfully created port: 7a61c618-de41-42f8-a8f4-ed5cf615ac72 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2327.375429] env[62684]: DEBUG nova.compute.manager [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2327.617350] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "refresh_cache-b88d9418-7e90-473e-bd9a-18bc398faad0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2327.617350] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired lock "refresh_cache-b88d9418-7e90-473e-bd9a-18bc398faad0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2327.617350] env[62684]: DEBUG nova.network.neutron [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2327.733320] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "refresh_cache-0156d807-1ab4-482f-91d1-172bf32bf23c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2327.733464] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "refresh_cache-0156d807-1ab4-482f-91d1-172bf32bf23c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2327.733612] env[62684]: DEBUG nova.network.neutron [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Forcefully refreshing network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2327.814446] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b19ff692-f4f2-405f-9030-6f8696caa85e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.834375] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e7e279-33c2-499c-b628-71c2a54cbad1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.852010] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Updating instance '69d26980-f42d-4d35-8de3-a85d7a6f0a11' progress to 67 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2328.178520] env[62684]: DEBUG nova.network.neutron [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2328.385281] env[62684]: DEBUG nova.compute.manager [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2328.391681] env[62684]: DEBUG nova.network.neutron [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Port 0d202452-add9-4ae7-b035-6554f287d049 binding to destination host cpu-1 is already ACTIVE {{(pid=62684) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2328.412486] env[62684]: DEBUG nova.virt.hardware [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2328.412742] env[62684]: DEBUG nova.virt.hardware [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2328.412906] env[62684]: DEBUG nova.virt.hardware [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2328.413111] env[62684]: DEBUG nova.virt.hardware [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2328.413290] env[62684]: DEBUG nova.virt.hardware [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2328.413452] env[62684]: DEBUG nova.virt.hardware [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 2328.413683] env[62684]: DEBUG nova.virt.hardware [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2328.413833] env[62684]: DEBUG nova.virt.hardware [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2328.414014] env[62684]: DEBUG nova.virt.hardware [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2328.414198] env[62684]: DEBUG nova.virt.hardware [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2328.414384] env[62684]: DEBUG nova.virt.hardware [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2328.415504] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06289084-b64e-4098-b41a-cfd93866b663 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.425894] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2b3d41e-5e4f-41a8-ad0c-f2ae5a78ffab {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.712118] env[62684]: DEBUG nova.network.neutron [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Updating instance_info_cache with network_info: [{"id": "7e4b9e76-bf05-4ee7-b25c-922484094be0", "address": "fa:16:3e:03:9c:84", "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-120070593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "263c101fcc5e493789b79dfd1ba97cc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e4b9e76-bf", "ovs_interfaceid": "7e4b9e76-bf05-4ee7-b25c-922484094be0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2328.921685] env[62684]: DEBUG nova.compute.manager [req-1de3dcf4-11b5-44c3-9937-47b923d07bd9 req-9ba40547-fa3b-4544-a1bd-b5e368b8420a service nova] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Received event network-changed-7e4b9e76-bf05-4ee7-b25c-922484094be0 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2328.921934] env[62684]: DEBUG nova.compute.manager [req-1de3dcf4-11b5-44c3-9937-47b923d07bd9 req-9ba40547-fa3b-4544-a1bd-b5e368b8420a service nova] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Refreshing instance network info cache due to event network-changed-7e4b9e76-bf05-4ee7-b25c-922484094be0. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2328.922276] env[62684]: DEBUG oslo_concurrency.lockutils [req-1de3dcf4-11b5-44c3-9937-47b923d07bd9 req-9ba40547-fa3b-4544-a1bd-b5e368b8420a service nova] Acquiring lock "refresh_cache-b88d9418-7e90-473e-bd9a-18bc398faad0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2329.192152] env[62684]: DEBUG nova.network.neutron [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Successfully updated port: 7a61c618-de41-42f8-a8f4-ed5cf615ac72 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2329.214369] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Releasing lock "refresh_cache-b88d9418-7e90-473e-bd9a-18bc398faad0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2329.214767] env[62684]: DEBUG nova.compute.manager [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Instance network_info: |[{"id": "7e4b9e76-bf05-4ee7-b25c-922484094be0", "address": "fa:16:3e:03:9c:84", "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-120070593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "263c101fcc5e493789b79dfd1ba97cc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e4b9e76-bf", "ovs_interfaceid": "7e4b9e76-bf05-4ee7-b25c-922484094be0", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2329.215056] env[62684]: DEBUG oslo_concurrency.lockutils [req-1de3dcf4-11b5-44c3-9937-47b923d07bd9 req-9ba40547-fa3b-4544-a1bd-b5e368b8420a service nova] Acquired lock "refresh_cache-b88d9418-7e90-473e-bd9a-18bc398faad0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2329.215300] env[62684]: DEBUG nova.network.neutron [req-1de3dcf4-11b5-44c3-9937-47b923d07bd9 req-9ba40547-fa3b-4544-a1bd-b5e368b8420a service nova] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Refreshing network info cache for port 7e4b9e76-bf05-4ee7-b25c-922484094be0 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2329.216543] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:03:9c:84', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92e4d027-e755-417b-8eea-9a8f24b85140', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e4b9e76-bf05-4ee7-b25c-922484094be0', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2329.232096] env[62684]: DEBUG oslo.service.loopingcall [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2329.233872] env[62684]: DEBUG nova.network.neutron [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Updating instance_info_cache with network_info: [{"id": "f5c06971-b96a-4fa0-858e-5e47100e2e68", "address": "fa:16:3e:9e:fc:9d", "network": {"id": "e177c6d0-ddd5-4029-94af-c8f1b937dd9f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1344612161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27d04006afc747e19ad87238bfdbaad1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5c06971-b9", "ovs_interfaceid": "f5c06971-b96a-4fa0-858e-5e47100e2e68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2329.237030] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2329.237842] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5717e39e-33ec-4d09-85bb-9bd34d45b409 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.259385] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2329.259385] env[62684]: value = "task-2053780" [ 2329.259385] env[62684]: _type = "Task" [ 2329.259385] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2329.267400] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053780, 'name': CreateVM_Task} progress is 0%. 
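Annotation: the instance_info_cache entries above are JSON-serialized network_info lists. The reader-side sketch below (not Nova code) pulls the port ID, MAC, device name, and fixed IP out of such a dump; the literal is an abbreviated copy of the b88d9418 entry from the trace, assuming it is isolated from the surrounding log text first.

```python
import json

# Abbreviated copy of one instance_info_cache entry from the trace.
network_info = json.loads("""
[{"id": "7e4b9e76-bf05-4ee7-b25c-922484094be0",
  "address": "fa:16:3e:03:9c:84",
  "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c",
              "bridge": "br-int",
              "subnets": [{"cidr": "192.168.128.0/28",
                           "ips": [{"address": "192.168.128.6",
                                    "type": "fixed",
                                    "version": 4}]}]},
  "type": "ovs",
  "devname": "tap7e4b9e76-bf",
  "active": true}]
""")

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]]
    print(vif["id"], vif["address"], vif["devname"], fixed_ips)
# 7e4b9e76-bf05-4ee7-b25c-922484094be0 fa:16:3e:03:9c:84 tap7e4b9e76-bf ['192.168.128.6']
```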
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2329.409238] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a9397652-3986-4994-bea2-8f2d4c4919db tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "587edf89-2ea0-4b89-8830-fa766b798398" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2329.409660] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a9397652-3986-4994-bea2-8f2d4c4919db tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "587edf89-2ea0-4b89-8830-fa766b798398" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2329.416636] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "69d26980-f42d-4d35-8de3-a85d7a6f0a11-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2329.416856] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "69d26980-f42d-4d35-8de3-a85d7a6f0a11-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2329.417046] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "69d26980-f42d-4d35-8de3-a85d7a6f0a11-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2329.532377] env[62684]: DEBUG nova.network.neutron [req-1de3dcf4-11b5-44c3-9937-47b923d07bd9 req-9ba40547-fa3b-4544-a1bd-b5e368b8420a service nova] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Updated VIF entry in instance network info cache for port 7e4b9e76-bf05-4ee7-b25c-922484094be0. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2329.532770] env[62684]: DEBUG nova.network.neutron [req-1de3dcf4-11b5-44c3-9937-47b923d07bd9 req-9ba40547-fa3b-4544-a1bd-b5e368b8420a service nova] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Updating instance_info_cache with network_info: [{"id": "7e4b9e76-bf05-4ee7-b25c-922484094be0", "address": "fa:16:3e:03:9c:84", "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-120070593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "263c101fcc5e493789b79dfd1ba97cc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e4b9e76-bf", "ovs_interfaceid": "7e4b9e76-bf05-4ee7-b25c-922484094be0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2329.700064] env[62684]: DEBUG oslo_concurrency.lockutils [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "refresh_cache-f7b61e23-fe0d-41fb-9100-d07cd8cb2d04" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2329.700183] env[62684]: DEBUG oslo_concurrency.lockutils [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquired lock "refresh_cache-f7b61e23-fe0d-41fb-9100-d07cd8cb2d04" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2329.700259] env[62684]: DEBUG nova.network.neutron [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2329.738173] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "refresh_cache-0156d807-1ab4-482f-91d1-172bf32bf23c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2329.738376] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Updated the network info_cache for instance {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2329.738579] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2329.738754] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2329.738988] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2329.739172] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2329.739319] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2329.739464] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2329.739593] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2329.769556] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053780, 'name': CreateVM_Task, 'duration_secs': 0.320004} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2329.769745] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2329.776084] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2329.776266] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2329.776584] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2329.776828] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed212032-1984-4c77-8b53-a1b71af14227 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.781451] env[62684]: DEBUG oslo_vmware.api [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2329.781451] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5269154b-1e9d-e482-bd66-574829ff8cf5" [ 2329.781451] env[62684]: _type = "Task" [ 2329.781451] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2329.789068] env[62684]: DEBUG oslo_vmware.api [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5269154b-1e9d-e482-bd66-574829ff8cf5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2329.919652] env[62684]: INFO nova.compute.manager [None req-a9397652-3986-4994-bea2-8f2d4c4919db tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Detaching volume bd2d9701-64ca-4c51-b6f2-382756dbec34 [ 2329.966703] env[62684]: INFO nova.virt.block_device [None req-a9397652-3986-4994-bea2-8f2d4c4919db tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Attempting to driver detach volume bd2d9701-64ca-4c51-b6f2-382756dbec34 from mountpoint /dev/sdb [ 2329.966703] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9397652-3986-4994-bea2-8f2d4c4919db tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Volume detach. Driver type: vmdk {{(pid=62684) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2329.966703] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9397652-3986-4994-bea2-8f2d4c4919db tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421403', 'volume_id': 'bd2d9701-64ca-4c51-b6f2-382756dbec34', 'name': 'volume-bd2d9701-64ca-4c51-b6f2-382756dbec34', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '587edf89-2ea0-4b89-8830-fa766b798398', 'attached_at': '', 'detached_at': '', 'volume_id': 'bd2d9701-64ca-4c51-b6f2-382756dbec34', 'serial': 'bd2d9701-64ca-4c51-b6f2-382756dbec34'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2329.966703] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0bfc70b-eb55-40c2-9182-8b7eac6c7c1b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.989700] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6da88f21-7ecf-4d40-93e4-9678ce445e7d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.996530] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2443e00b-e913-430c-806e-90954ab08a3f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.016800] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b39c3f3e-b25e-461e-a5e1-a6fbf063e050 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.031570] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9397652-3986-4994-bea2-8f2d4c4919db tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] The volume has not been displaced from its original location: [datastore1] volume-bd2d9701-64ca-4c51-b6f2-382756dbec34/volume-bd2d9701-64ca-4c51-b6f2-382756dbec34.vmdk. No consolidation needed. 
{{(pid=62684) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2330.038013] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9397652-3986-4994-bea2-8f2d4c4919db tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Reconfiguring VM instance instance-00000067 to detach disk 2001 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2330.038565] env[62684]: DEBUG oslo_concurrency.lockutils [req-1de3dcf4-11b5-44c3-9937-47b923d07bd9 req-9ba40547-fa3b-4544-a1bd-b5e368b8420a service nova] Releasing lock "refresh_cache-b88d9418-7e90-473e-bd9a-18bc398faad0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2330.038935] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-50c11924-d183-4d67-abe8-e6363bef45c3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.057183] env[62684]: DEBUG oslo_vmware.api [None req-a9397652-3986-4994-bea2-8f2d4c4919db tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2330.057183] env[62684]: value = "task-2053781" [ 2330.057183] env[62684]: _type = "Task" [ 2330.057183] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2330.064753] env[62684]: DEBUG oslo_vmware.api [None req-a9397652-3986-4994-bea2-8f2d4c4919db tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053781, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2330.248993] env[62684]: DEBUG nova.network.neutron [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2330.292078] env[62684]: DEBUG oslo_vmware.api [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5269154b-1e9d-e482-bd66-574829ff8cf5, 'name': SearchDatastore_Task, 'duration_secs': 0.009138} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2330.294569] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2330.294826] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2330.295094] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2330.295252] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2330.295439] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2330.295716] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-870cc0a7-a3d1-41e4-9b57-64db27b44404 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.313131] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2330.313326] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2330.314066] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-142e7532-5b33-48b5-affa-9ee689fc4304 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.319621] env[62684]: DEBUG oslo_vmware.api [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2330.319621] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c9dead-e271-1687-936a-2ad2b41969c7" [ 2330.319621] env[62684]: _type = "Task" [ 2330.319621] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2330.332954] env[62684]: DEBUG oslo_vmware.api [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c9dead-e271-1687-936a-2ad2b41969c7, 'name': SearchDatastore_Task, 'duration_secs': 0.010993} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2330.333798] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be639d6b-0a6c-4ba4-8ca9-b6ab6b9e6b54 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.338600] env[62684]: DEBUG oslo_vmware.api [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2330.338600] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b1eb5e-464f-b22f-251d-f55ea0c90ee5" [ 2330.338600] env[62684]: _type = "Task" [ 2330.338600] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2330.345961] env[62684]: DEBUG oslo_vmware.api [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b1eb5e-464f-b22f-251d-f55ea0c90ee5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2330.394283] env[62684]: DEBUG nova.network.neutron [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Updating instance_info_cache with network_info: [{"id": "7a61c618-de41-42f8-a8f4-ed5cf615ac72", "address": "fa:16:3e:41:aa:33", "network": {"id": "8136a664-f757-43b3-a2fa-bacdf2e9566c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1799567463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947e7359aaba456fa1763f4dc8e9d359", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cc30a16-f070-421c-964e-50c9aa32f17a", "external-id": "nsx-vlan-transportzone-424", "segmentation_id": 424, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a61c618-de", "ovs_interfaceid": "7a61c618-de41-42f8-a8f4-ed5cf615ac72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2330.452837] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "refresh_cache-69d26980-f42d-4d35-8de3-a85d7a6f0a11" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2330.453145] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquired lock "refresh_cache-69d26980-f42d-4d35-8de3-a85d7a6f0a11" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2330.453333] env[62684]: DEBUG nova.network.neutron [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2330.567333] env[62684]: DEBUG oslo_vmware.api [None req-a9397652-3986-4994-bea2-8f2d4c4919db tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053781, 'name': ReconfigVM_Task, 'duration_secs': 0.28678} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2330.567612] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9397652-3986-4994-bea2-8f2d4c4919db tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Reconfigured VM instance instance-00000067 to detach disk 2001 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2330.572185] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e74f591f-1adb-4012-a59b-efab21dc23e2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.586944] env[62684]: DEBUG oslo_vmware.api [None req-a9397652-3986-4994-bea2-8f2d4c4919db tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2330.586944] env[62684]: value = "task-2053782" [ 2330.586944] env[62684]: _type = "Task" [ 2330.586944] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2330.595826] env[62684]: DEBUG oslo_vmware.api [None req-a9397652-3986-4994-bea2-8f2d4c4919db tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053782, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2330.848491] env[62684]: DEBUG oslo_vmware.api [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52b1eb5e-464f-b22f-251d-f55ea0c90ee5, 'name': SearchDatastore_Task, 'duration_secs': 0.008827} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2330.848757] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2330.849030] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] b88d9418-7e90-473e-bd9a-18bc398faad0/b88d9418-7e90-473e-bd9a-18bc398faad0.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2330.849295] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dd76a45e-c2c4-4431-8968-69fc2ce6bf9e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.855955] env[62684]: DEBUG oslo_vmware.api [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2330.855955] env[62684]: value = "task-2053783" [ 2330.855955] env[62684]: _type = "Task" [ 2330.855955] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2330.862991] env[62684]: DEBUG oslo_vmware.api [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053783, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2330.896547] env[62684]: DEBUG oslo_concurrency.lockutils [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Releasing lock "refresh_cache-f7b61e23-fe0d-41fb-9100-d07cd8cb2d04" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2330.896838] env[62684]: DEBUG nova.compute.manager [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Instance network_info: |[{"id": "7a61c618-de41-42f8-a8f4-ed5cf615ac72", "address": "fa:16:3e:41:aa:33", "network": {"id": "8136a664-f757-43b3-a2fa-bacdf2e9566c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1799567463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947e7359aaba456fa1763f4dc8e9d359", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cc30a16-f070-421c-964e-50c9aa32f17a", "external-id": "nsx-vlan-transportzone-424", "segmentation_id": 424, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a61c618-de", "ovs_interfaceid": "7a61c618-de41-42f8-a8f4-ed5cf615ac72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2330.897309] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:aa:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cc30a16-f070-421c-964e-50c9aa32f17a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7a61c618-de41-42f8-a8f4-ed5cf615ac72', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2330.904681] env[62684]: DEBUG oslo.service.loopingcall [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2330.904887] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2330.905115] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-535f7c40-8ea2-4dd7-b75f-45383280d595 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.923167] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2330.923167] env[62684]: value = "task-2053784" [ 2330.923167] env[62684]: _type = "Task" [ 2330.923167] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2330.930403] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053784, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2330.975403] env[62684]: DEBUG nova.compute.manager [req-2e5e75e0-168e-495e-aac0-8e268bfebbf8 req-6bad2843-b4f1-429d-8a82-7700b1f22ac8 service nova] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Received event network-vif-plugged-7a61c618-de41-42f8-a8f4-ed5cf615ac72 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2330.975714] env[62684]: DEBUG oslo_concurrency.lockutils [req-2e5e75e0-168e-495e-aac0-8e268bfebbf8 req-6bad2843-b4f1-429d-8a82-7700b1f22ac8 service nova] Acquiring lock "f7b61e23-fe0d-41fb-9100-d07cd8cb2d04-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2330.975938] env[62684]: DEBUG oslo_concurrency.lockutils [req-2e5e75e0-168e-495e-aac0-8e268bfebbf8 req-6bad2843-b4f1-429d-8a82-7700b1f22ac8 service nova] Lock "f7b61e23-fe0d-41fb-9100-d07cd8cb2d04-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2330.975999] env[62684]: DEBUG oslo_concurrency.lockutils [req-2e5e75e0-168e-495e-aac0-8e268bfebbf8 req-6bad2843-b4f1-429d-8a82-7700b1f22ac8 service nova] Lock "f7b61e23-fe0d-41fb-9100-d07cd8cb2d04-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2330.976235] env[62684]: DEBUG nova.compute.manager [req-2e5e75e0-168e-495e-aac0-8e268bfebbf8 req-6bad2843-b4f1-429d-8a82-7700b1f22ac8 service nova] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] No waiting events found dispatching network-vif-plugged-7a61c618-de41-42f8-a8f4-ed5cf615ac72 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2330.976365] env[62684]: WARNING nova.compute.manager [req-2e5e75e0-168e-495e-aac0-8e268bfebbf8 req-6bad2843-b4f1-429d-8a82-7700b1f22ac8 service nova] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Received unexpected event network-vif-plugged-7a61c618-de41-42f8-a8f4-ed5cf615ac72 for instance with vm_state building and task_state spawning. 
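The records just above show the spawn path's task pattern for instance f7b61e23-fe0d-41fb-9100-d07cd8cb2d04: Folder.CreateVM_Task is invoked, a Task reference (task-2053784) comes back immediately, and oslo.vmware then polls it (the "_poll_task ... progress is N%." lines) until it finishes. Below is a minimal sketch of that invoke-and-wait pattern, assuming an already-established oslo_vmware.api.VMwareAPISession; vm_folder, config_spec and res_pool are placeholder names for the real managed-object references and are not taken from this log.

    # Sketch only: invoke a vCenter method that returns a Task, then block
    # until vCenter reports it finished (the role played by wait_for_task /
    # _poll_task in oslo_vmware/api.py in the records above).
    from oslo_vmware import exceptions as vexc

    def create_vm_and_wait(session, vm_folder, config_spec, res_pool):
        # Folder.CreateVM_Task returns a Task reference right away.
        task = session.invoke_api(session.vim, 'CreateVM_Task',
                                  vm_folder, config=config_spec, pool=res_pool)
        try:
            # wait_for_task polls TaskInfo until it reaches 'success' or 'error'.
            task_info = session.wait_for_task(task)
        except vexc.VimException:
            # Task ended in the 'error' state; the translated fault carries the reason.
            raise
        # For CreateVM_Task the task result is the new VirtualMachine reference.
        return task_info.result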
[ 2330.976533] env[62684]: DEBUG nova.compute.manager [req-2e5e75e0-168e-495e-aac0-8e268bfebbf8 req-6bad2843-b4f1-429d-8a82-7700b1f22ac8 service nova] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Received event network-changed-7a61c618-de41-42f8-a8f4-ed5cf615ac72 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2330.976690] env[62684]: DEBUG nova.compute.manager [req-2e5e75e0-168e-495e-aac0-8e268bfebbf8 req-6bad2843-b4f1-429d-8a82-7700b1f22ac8 service nova] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Refreshing instance network info cache due to event network-changed-7a61c618-de41-42f8-a8f4-ed5cf615ac72. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2330.976881] env[62684]: DEBUG oslo_concurrency.lockutils [req-2e5e75e0-168e-495e-aac0-8e268bfebbf8 req-6bad2843-b4f1-429d-8a82-7700b1f22ac8 service nova] Acquiring lock "refresh_cache-f7b61e23-fe0d-41fb-9100-d07cd8cb2d04" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2330.977030] env[62684]: DEBUG oslo_concurrency.lockutils [req-2e5e75e0-168e-495e-aac0-8e268bfebbf8 req-6bad2843-b4f1-429d-8a82-7700b1f22ac8 service nova] Acquired lock "refresh_cache-f7b61e23-fe0d-41fb-9100-d07cd8cb2d04" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2330.977194] env[62684]: DEBUG nova.network.neutron [req-2e5e75e0-168e-495e-aac0-8e268bfebbf8 req-6bad2843-b4f1-429d-8a82-7700b1f22ac8 service nova] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Refreshing network info cache for port 7a61c618-de41-42f8-a8f4-ed5cf615ac72 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2331.097768] env[62684]: DEBUG oslo_vmware.api [None req-a9397652-3986-4994-bea2-8f2d4c4919db tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053782, 'name': ReconfigVM_Task, 'duration_secs': 0.13235} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2331.098105] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9397652-3986-4994-bea2-8f2d4c4919db tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421403', 'volume_id': 'bd2d9701-64ca-4c51-b6f2-382756dbec34', 'name': 'volume-bd2d9701-64ca-4c51-b6f2-382756dbec34', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '587edf89-2ea0-4b89-8830-fa766b798398', 'attached_at': '', 'detached_at': '', 'volume_id': 'bd2d9701-64ca-4c51-b6f2-382756dbec34', 'serial': 'bd2d9701-64ca-4c51-b6f2-382756dbec34'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2331.200326] env[62684]: DEBUG nova.network.neutron [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Updating instance_info_cache with network_info: [{"id": "0d202452-add9-4ae7-b035-6554f287d049", "address": "fa:16:3e:3b:e0:ab", "network": {"id": "b24dd0c0-a394-4ca6-a79a-94535bc1df6f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2023102141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "607a0aa1049640d882d7dd490f5f98ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d202452-ad", "ovs_interfaceid": "0d202452-add9-4ae7-b035-6554f287d049", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2331.366474] env[62684]: DEBUG oslo_vmware.api [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053783, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.451022} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2331.366814] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] b88d9418-7e90-473e-bd9a-18bc398faad0/b88d9418-7e90-473e-bd9a-18bc398faad0.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2331.367146] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2331.367440] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7e4c430b-175f-428a-ad61-2774e9842837 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.374244] env[62684]: DEBUG oslo_vmware.api [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2331.374244] env[62684]: value = "task-2053785" [ 2331.374244] env[62684]: _type = "Task" [ 2331.374244] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2331.381822] env[62684]: DEBUG oslo_vmware.api [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053785, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2331.432426] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053784, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2331.640057] env[62684]: DEBUG nova.objects.instance [None req-a9397652-3986-4994-bea2-8f2d4c4919db tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lazy-loading 'flavor' on Instance uuid 587edf89-2ea0-4b89-8830-fa766b798398 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2331.666117] env[62684]: DEBUG nova.network.neutron [req-2e5e75e0-168e-495e-aac0-8e268bfebbf8 req-6bad2843-b4f1-429d-8a82-7700b1f22ac8 service nova] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Updated VIF entry in instance network info cache for port 7a61c618-de41-42f8-a8f4-ed5cf615ac72. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2331.666483] env[62684]: DEBUG nova.network.neutron [req-2e5e75e0-168e-495e-aac0-8e268bfebbf8 req-6bad2843-b4f1-429d-8a82-7700b1f22ac8 service nova] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Updating instance_info_cache with network_info: [{"id": "7a61c618-de41-42f8-a8f4-ed5cf615ac72", "address": "fa:16:3e:41:aa:33", "network": {"id": "8136a664-f757-43b3-a2fa-bacdf2e9566c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1799567463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947e7359aaba456fa1763f4dc8e9d359", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cc30a16-f070-421c-964e-50c9aa32f17a", "external-id": "nsx-vlan-transportzone-424", "segmentation_id": 424, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a61c618-de", "ovs_interfaceid": "7a61c618-de41-42f8-a8f4-ed5cf615ac72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2331.706223] env[62684]: DEBUG oslo_concurrency.lockutils [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Releasing lock "refresh_cache-69d26980-f42d-4d35-8de3-a85d7a6f0a11" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2331.885768] env[62684]: DEBUG oslo_vmware.api [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053785, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062585} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2331.885841] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2331.886633] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc49d29b-f283-4b18-a59d-ef59726767fb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.908630] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] b88d9418-7e90-473e-bd9a-18bc398faad0/b88d9418-7e90-473e-bd9a-18bc398faad0.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2331.908923] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d45be1b-9017-47a5-b227-1be369110962 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.928927] env[62684]: DEBUG oslo_vmware.api [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2331.928927] env[62684]: value = "task-2053786" [ 2331.928927] env[62684]: _type = "Task" [ 2331.928927] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2331.935103] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053784, 'name': CreateVM_Task, 'duration_secs': 0.528283} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2331.935586] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2331.936261] env[62684]: DEBUG oslo_concurrency.lockutils [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2331.936429] env[62684]: DEBUG oslo_concurrency.lockutils [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2331.936753] env[62684]: DEBUG oslo_concurrency.lockutils [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2331.939780] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88ba5ddd-bab9-45d4-a4ad-6d273b8ffeaf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.941297] env[62684]: DEBUG oslo_vmware.api [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053786, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2331.944144] env[62684]: DEBUG oslo_vmware.api [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2331.944144] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a3aec4-ddb7-0474-5e6b-54aeb997ce8e" [ 2331.944144] env[62684]: _type = "Task" [ 2331.944144] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2331.951927] env[62684]: DEBUG oslo_vmware.api [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a3aec4-ddb7-0474-5e6b-54aeb997ce8e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2332.168781] env[62684]: DEBUG oslo_concurrency.lockutils [req-2e5e75e0-168e-495e-aac0-8e268bfebbf8 req-6bad2843-b4f1-429d-8a82-7700b1f22ac8 service nova] Releasing lock "refresh_cache-f7b61e23-fe0d-41fb-9100-d07cd8cb2d04" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2332.231106] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f0d359-350b-4a94-a982-97ce53b605f6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.249858] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-299af864-5e40-48c1-8c62-bce163f5dfe0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.256740] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Updating instance '69d26980-f42d-4d35-8de3-a85d7a6f0a11' progress to 83 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2332.438851] env[62684]: DEBUG oslo_vmware.api [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053786, 'name': ReconfigVM_Task, 'duration_secs': 0.309695} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2332.439111] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Reconfigured VM instance instance-00000070 to attach disk [datastore1] b88d9418-7e90-473e-bd9a-18bc398faad0/b88d9418-7e90-473e-bd9a-18bc398faad0.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2332.439737] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c344bfc0-73e4-413d-a684-c5604c12c352 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.448618] env[62684]: DEBUG oslo_vmware.api [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2332.448618] env[62684]: value = "task-2053787" [ 2332.448618] env[62684]: _type = "Task" [ 2332.448618] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2332.455174] env[62684]: DEBUG oslo_vmware.api [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a3aec4-ddb7-0474-5e6b-54aeb997ce8e, 'name': SearchDatastore_Task, 'duration_secs': 0.018949} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2332.455746] env[62684]: DEBUG oslo_concurrency.lockutils [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2332.456008] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2332.456253] env[62684]: DEBUG oslo_concurrency.lockutils [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2332.456407] env[62684]: DEBUG oslo_concurrency.lockutils [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2332.456587] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2332.456832] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-53718891-f123-4162-9b2b-b437de35f906 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.462865] env[62684]: DEBUG oslo_vmware.api [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053787, 'name': Rename_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2332.470486] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2332.470866] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2332.471933] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d218362-deb4-4313-a905-52418713841e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.478059] env[62684]: DEBUG oslo_vmware.api [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2332.478059] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52682b7e-b527-de43-9c9b-e41b1c544880" [ 2332.478059] env[62684]: _type = "Task" [ 2332.478059] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2332.485860] env[62684]: DEBUG oslo_vmware.api [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52682b7e-b527-de43-9c9b-e41b1c544880, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2332.648468] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a9397652-3986-4994-bea2-8f2d4c4919db tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "587edf89-2ea0-4b89-8830-fa766b798398" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.239s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2332.763308] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2332.763664] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6e7ccd8e-67f9-4a9f-911b-324d701581d8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.770513] env[62684]: DEBUG oslo_vmware.api [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2332.770513] env[62684]: value = "task-2053788" [ 2332.770513] env[62684]: _type = "Task" [ 2332.770513] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2332.778304] env[62684]: DEBUG oslo_vmware.api [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053788, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2332.881728] env[62684]: DEBUG oslo_concurrency.lockutils [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquiring lock "9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2332.882286] env[62684]: DEBUG oslo_concurrency.lockutils [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lock "9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2332.882692] env[62684]: DEBUG oslo_concurrency.lockutils [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquiring lock "9f1e9ae9-c082-4fbe-bd21-6e14e40962c1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2332.882998] env[62684]: DEBUG oslo_concurrency.lockutils [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lock "9f1e9ae9-c082-4fbe-bd21-6e14e40962c1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2332.883293] env[62684]: DEBUG oslo_concurrency.lockutils [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lock "9f1e9ae9-c082-4fbe-bd21-6e14e40962c1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2332.885924] env[62684]: INFO nova.compute.manager [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Terminating instance [ 2332.887744] env[62684]: DEBUG nova.compute.manager [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2332.887994] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2332.888945] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d864d952-87d1-45ba-b6e1-789ed50ffe1e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.896295] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2332.896526] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b8efaae6-1f1f-43cd-8028-2a376657c810 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.903482] env[62684]: DEBUG oslo_vmware.api [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2332.903482] env[62684]: value = "task-2053789" [ 2332.903482] env[62684]: _type = "Task" [ 2332.903482] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2332.910786] env[62684]: DEBUG oslo_vmware.api [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053789, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2332.958934] env[62684]: DEBUG oslo_vmware.api [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053787, 'name': Rename_Task, 'duration_secs': 0.171111} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2332.959191] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2332.959445] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-21b2f4d3-3a2c-4d1c-9626-0901893118d3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.965891] env[62684]: DEBUG oslo_vmware.api [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2332.965891] env[62684]: value = "task-2053790" [ 2332.965891] env[62684]: _type = "Task" [ 2332.965891] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2332.973443] env[62684]: DEBUG oslo_vmware.api [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053790, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2332.986458] env[62684]: DEBUG oslo_vmware.api [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52682b7e-b527-de43-9c9b-e41b1c544880, 'name': SearchDatastore_Task, 'duration_secs': 0.039632} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2332.987206] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2df1cc11-3ae7-4087-9e2a-a697825d20d4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.992828] env[62684]: DEBUG oslo_vmware.api [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2332.992828] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52666aab-d20f-bf3b-49a6-8d2e01e71c69" [ 2332.992828] env[62684]: _type = "Task" [ 2332.992828] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2333.002338] env[62684]: DEBUG oslo_vmware.api [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52666aab-d20f-bf3b-49a6-8d2e01e71c69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2333.280165] env[62684]: DEBUG oslo_vmware.api [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053788, 'name': PowerOnVM_Task, 'duration_secs': 0.399259} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2333.280446] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2333.280672] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-83475c4c-e7eb-4e1e-9698-4ddbedf46c9b tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Updating instance '69d26980-f42d-4d35-8de3-a85d7a6f0a11' progress to 100 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2333.413064] env[62684]: DEBUG oslo_vmware.api [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053789, 'name': PowerOffVM_Task, 'duration_secs': 0.167506} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2333.413414] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2333.413616] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2333.413922] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f1c7b7dd-f17b-4e78-a0b2-efdd30188905 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.477661] env[62684]: DEBUG oslo_vmware.api [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053790, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2333.502653] env[62684]: DEBUG oslo_vmware.api [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52666aab-d20f-bf3b-49a6-8d2e01e71c69, 'name': SearchDatastore_Task, 'duration_secs': 0.011844} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2333.502909] env[62684]: DEBUG oslo_concurrency.lockutils [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2333.503211] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] f7b61e23-fe0d-41fb-9100-d07cd8cb2d04/f7b61e23-fe0d-41fb-9100-d07cd8cb2d04.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2333.503498] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c063cdaa-216a-40a8-8922-35053ded2f3c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.510487] env[62684]: DEBUG oslo_vmware.api [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2333.510487] env[62684]: value = "task-2053792" [ 2333.510487] env[62684]: _type = "Task" [ 2333.510487] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2333.518091] env[62684]: DEBUG oslo_vmware.api [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053792, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2333.668341] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2333.668698] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2333.668825] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Deleting the datastore file [datastore2] 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2333.669149] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a728d23f-899b-401d-ac34-d00c9ca80bdc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.675718] env[62684]: DEBUG oslo_vmware.api [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for the task: (returnval){ [ 2333.675718] env[62684]: value = "task-2053793" [ 2333.675718] env[62684]: _type = "Task" [ 2333.675718] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2333.679884] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "587edf89-2ea0-4b89-8830-fa766b798398" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2333.680255] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "587edf89-2ea0-4b89-8830-fa766b798398" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2333.680552] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "587edf89-2ea0-4b89-8830-fa766b798398-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2333.680843] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "587edf89-2ea0-4b89-8830-fa766b798398-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2333.681102] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "587edf89-2ea0-4b89-8830-fa766b798398-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2333.683514] env[62684]: INFO nova.compute.manager [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Terminating instance [ 2333.687812] env[62684]: DEBUG oslo_vmware.api [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053793, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2333.688424] env[62684]: DEBUG nova.compute.manager [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2333.688700] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2333.689603] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f95b4be2-b95a-4c0a-aec5-3852abb3ca6e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.696489] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2333.696745] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d6fc8051-3bf8-4915-a34c-821cd9761f31 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.703132] env[62684]: DEBUG oslo_vmware.api [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2333.703132] env[62684]: value = "task-2053794" [ 2333.703132] env[62684]: _type = "Task" [ 2333.703132] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2333.711412] env[62684]: DEBUG oslo_vmware.api [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053794, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2333.978117] env[62684]: DEBUG oslo_vmware.api [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053790, 'name': PowerOnVM_Task, 'duration_secs': 0.576429} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2333.978683] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2333.978909] env[62684]: INFO nova.compute.manager [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Took 10.50 seconds to spawn the instance on the hypervisor. 
[ 2333.979162] env[62684]: DEBUG nova.compute.manager [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2333.980152] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0877a1c3-f33c-4d7f-b44a-82fece594b23 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.021553] env[62684]: DEBUG oslo_vmware.api [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053792, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2334.185682] env[62684]: DEBUG oslo_vmware.api [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Task: {'id': task-2053793, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.297926} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2334.185941] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2334.186153] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2334.186334] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2334.186513] env[62684]: INFO nova.compute.manager [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Took 1.30 seconds to destroy the instance on the hypervisor. [ 2334.186762] env[62684]: DEBUG oslo.service.loopingcall [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2334.186960] env[62684]: DEBUG nova.compute.manager [-] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2334.187066] env[62684]: DEBUG nova.network.neutron [-] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2334.212967] env[62684]: DEBUG oslo_vmware.api [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053794, 'name': PowerOffVM_Task, 'duration_secs': 0.33392} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2334.213521] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2334.213706] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2334.213959] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-72241cdc-0378-4c5b-9b19-50e624900961 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.362183] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2334.362429] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2334.362616] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Deleting the datastore file [datastore2] 587edf89-2ea0-4b89-8830-fa766b798398 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2334.362903] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b5bb8b80-1846-4502-ba8c-52578173bec5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.369696] env[62684]: DEBUG oslo_vmware.api [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2334.369696] 
env[62684]: value = "task-2053796" [ 2334.369696] env[62684]: _type = "Task" [ 2334.369696] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2334.378804] env[62684]: DEBUG oslo_vmware.api [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053796, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2334.500238] env[62684]: INFO nova.compute.manager [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Took 15.76 seconds to build instance. [ 2334.523373] env[62684]: DEBUG oslo_vmware.api [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053792, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.553517} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2334.524567] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] f7b61e23-fe0d-41fb-9100-d07cd8cb2d04/f7b61e23-fe0d-41fb-9100-d07cd8cb2d04.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2334.524809] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2334.526161] env[62684]: DEBUG nova.compute.manager [req-532de994-17b0-48d8-b241-f7c555b7dac7 req-7d9aad57-25a7-430b-9705-77db06d4a934 service nova] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Received event network-vif-deleted-61adf5cc-1692-4079-b909-b15313ce9680 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2334.526388] env[62684]: INFO nova.compute.manager [req-532de994-17b0-48d8-b241-f7c555b7dac7 req-7d9aad57-25a7-430b-9705-77db06d4a934 service nova] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Neutron deleted interface 61adf5cc-1692-4079-b909-b15313ce9680; detaching it from the instance and deleting it from the info cache [ 2334.526582] env[62684]: DEBUG nova.network.neutron [req-532de994-17b0-48d8-b241-f7c555b7dac7 req-7d9aad57-25a7-430b-9705-77db06d4a934 service nova] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2334.528082] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-92cf578b-4a8f-4d6f-bea8-aaedab92ba48 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.537810] env[62684]: DEBUG oslo_vmware.api [None 
req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2334.537810] env[62684]: value = "task-2053797" [ 2334.537810] env[62684]: _type = "Task" [ 2334.537810] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2334.547217] env[62684]: DEBUG oslo_vmware.api [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053797, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2334.806901] env[62684]: DEBUG nova.compute.manager [req-e9d39d0c-de24-4ca7-a166-3b0c91541b81 req-1277990c-69c6-44e6-8c05-c4e24eb0d3c0 service nova] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Received event network-changed-7e4b9e76-bf05-4ee7-b25c-922484094be0 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2334.807375] env[62684]: DEBUG nova.compute.manager [req-e9d39d0c-de24-4ca7-a166-3b0c91541b81 req-1277990c-69c6-44e6-8c05-c4e24eb0d3c0 service nova] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Refreshing instance network info cache due to event network-changed-7e4b9e76-bf05-4ee7-b25c-922484094be0. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2334.807653] env[62684]: DEBUG oslo_concurrency.lockutils [req-e9d39d0c-de24-4ca7-a166-3b0c91541b81 req-1277990c-69c6-44e6-8c05-c4e24eb0d3c0 service nova] Acquiring lock "refresh_cache-b88d9418-7e90-473e-bd9a-18bc398faad0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2334.807783] env[62684]: DEBUG oslo_concurrency.lockutils [req-e9d39d0c-de24-4ca7-a166-3b0c91541b81 req-1277990c-69c6-44e6-8c05-c4e24eb0d3c0 service nova] Acquired lock "refresh_cache-b88d9418-7e90-473e-bd9a-18bc398faad0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2334.807957] env[62684]: DEBUG nova.network.neutron [req-e9d39d0c-de24-4ca7-a166-3b0c91541b81 req-1277990c-69c6-44e6-8c05-c4e24eb0d3c0 service nova] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Refreshing network info cache for port 7e4b9e76-bf05-4ee7-b25c-922484094be0 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2334.879845] env[62684]: DEBUG oslo_vmware.api [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053796, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176516} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2334.880376] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2334.880747] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2334.881028] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2334.881294] env[62684]: INFO nova.compute.manager [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Took 1.19 seconds to destroy the instance on the hypervisor. [ 2334.881641] env[62684]: DEBUG oslo.service.loopingcall [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2334.881909] env[62684]: DEBUG nova.compute.manager [-] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2334.882054] env[62684]: DEBUG nova.network.neutron [-] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2335.003367] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2811114a-899b-4aff-be5e-ce6c7104298d tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "b88d9418-7e90-473e-bd9a-18bc398faad0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.267s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2335.005710] env[62684]: DEBUG nova.network.neutron [-] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2335.029605] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3dbc8531-ff5f-48ac-a61c-a50937dbc50f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.039027] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afc9f807-5399-4c4d-b93f-7dc3a02e5f34 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.057890] env[62684]: DEBUG oslo_vmware.api [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053797, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.357568} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2335.058168] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2335.059015] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d3dcd5d-1f85-4eaf-82b9-a5818e65cd97 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.080264] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] f7b61e23-fe0d-41fb-9100-d07cd8cb2d04/f7b61e23-fe0d-41fb-9100-d07cd8cb2d04.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2335.088518] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e46a18da-4841-432a-9b6d-80b3ea68b6ce {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.101691] env[62684]: DEBUG nova.compute.manager [req-532de994-17b0-48d8-b241-f7c555b7dac7 req-7d9aad57-25a7-430b-9705-77db06d4a934 service nova] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Detach interface failed, port_id=61adf5cc-1692-4079-b909-b15313ce9680, reason: Instance 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1 could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2335.107696] env[62684]: DEBUG oslo_vmware.api [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2335.107696] env[62684]: value = "task-2053798" [ 2335.107696] env[62684]: _type = "Task" [ 2335.107696] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2335.115811] env[62684]: DEBUG oslo_vmware.api [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053798, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2335.509097] env[62684]: INFO nova.compute.manager [-] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Took 1.32 seconds to deallocate network for instance. [ 2335.617750] env[62684]: DEBUG oslo_vmware.api [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053798, 'name': ReconfigVM_Task, 'duration_secs': 0.42623} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2335.618067] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Reconfigured VM instance instance-00000071 to attach disk [datastore1] f7b61e23-fe0d-41fb-9100-d07cd8cb2d04/f7b61e23-fe0d-41fb-9100-d07cd8cb2d04.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2335.618703] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bddb14e6-0e12-4bfc-aae9-1d1fcccdaa1b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.621163] env[62684]: DEBUG nova.network.neutron [req-e9d39d0c-de24-4ca7-a166-3b0c91541b81 req-1277990c-69c6-44e6-8c05-c4e24eb0d3c0 service nova] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Updated VIF entry in instance network info cache for port 7e4b9e76-bf05-4ee7-b25c-922484094be0. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2335.621503] env[62684]: DEBUG nova.network.neutron [req-e9d39d0c-de24-4ca7-a166-3b0c91541b81 req-1277990c-69c6-44e6-8c05-c4e24eb0d3c0 service nova] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Updating instance_info_cache with network_info: [{"id": "7e4b9e76-bf05-4ee7-b25c-922484094be0", "address": "fa:16:3e:03:9c:84", "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-120070593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "263c101fcc5e493789b79dfd1ba97cc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e4b9e76-bf", "ovs_interfaceid": "7e4b9e76-bf05-4ee7-b25c-922484094be0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2335.628063] env[62684]: DEBUG oslo_vmware.api [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2335.628063] env[62684]: value = "task-2053799" [ 2335.628063] env[62684]: _type = "Task" [ 2335.628063] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2335.638255] env[62684]: DEBUG oslo_vmware.api [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053799, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2335.681422] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "69d26980-f42d-4d35-8de3-a85d7a6f0a11" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2335.681686] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "69d26980-f42d-4d35-8de3-a85d7a6f0a11" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2335.681930] env[62684]: DEBUG nova.compute.manager [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Going to confirm migration 6 {{(pid=62684) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 2335.916053] env[62684]: DEBUG nova.network.neutron [-] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2336.016966] env[62684]: DEBUG oslo_concurrency.lockutils [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2336.016966] env[62684]: DEBUG oslo_concurrency.lockutils [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2336.016966] env[62684]: DEBUG nova.objects.instance [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lazy-loading 'resources' on Instance uuid 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2336.124789] env[62684]: DEBUG oslo_concurrency.lockutils [req-e9d39d0c-de24-4ca7-a166-3b0c91541b81 req-1277990c-69c6-44e6-8c05-c4e24eb0d3c0 service nova] Releasing lock "refresh_cache-b88d9418-7e90-473e-bd9a-18bc398faad0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2336.138188] env[62684]: DEBUG oslo_vmware.api [None 
req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053799, 'name': Rename_Task, 'duration_secs': 0.329869} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2336.138463] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2336.138720] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-91041c91-1e2e-4f6e-8f4e-226e12f6ef2a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.145074] env[62684]: DEBUG oslo_vmware.api [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2336.145074] env[62684]: value = "task-2053800" [ 2336.145074] env[62684]: _type = "Task" [ 2336.145074] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2336.152700] env[62684]: DEBUG oslo_vmware.api [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053800, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2336.220141] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "refresh_cache-69d26980-f42d-4d35-8de3-a85d7a6f0a11" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2336.220387] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquired lock "refresh_cache-69d26980-f42d-4d35-8de3-a85d7a6f0a11" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2336.220593] env[62684]: DEBUG nova.network.neutron [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2336.220890] env[62684]: DEBUG nova.objects.instance [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lazy-loading 'info_cache' on Instance uuid 69d26980-f42d-4d35-8de3-a85d7a6f0a11 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2336.423011] env[62684]: INFO nova.compute.manager [-] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Took 1.54 seconds to deallocate network for instance. 
[ 2336.558476] env[62684]: DEBUG nova.compute.manager [req-57633ef9-8a94-4283-bdb3-1bd380b380b7 req-ef708f77-cb93-40b7-90b3-1ac2260e9a0a service nova] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Received event network-vif-deleted-91d5100c-0d94-42a3-a4f2-5055bd108b50 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2336.624869] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c886021-94a3-4d5d-a8f9-75ee32749aaf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.632873] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33f5cc2b-b7ca-45bf-bbe7-87645589a06a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.665958] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d95f24b-6787-4f07-922c-be23f4ddf4e4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.676063] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-038a75a1-ce46-4fdb-b15b-f9398489abd0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.679674] env[62684]: DEBUG oslo_vmware.api [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053800, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2336.689160] env[62684]: DEBUG nova.compute.provider_tree [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2336.929746] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2337.171712] env[62684]: DEBUG oslo_vmware.api [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053800, 'name': PowerOnVM_Task, 'duration_secs': 0.869415} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2337.172025] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2337.172248] env[62684]: INFO nova.compute.manager [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Took 8.79 seconds to spawn the instance on the hypervisor. [ 2337.172434] env[62684]: DEBUG nova.compute.manager [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2337.173220] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-042afc75-6d77-47f4-8567-c2954730f022 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2337.192452] env[62684]: DEBUG nova.scheduler.client.report [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2337.418083] env[62684]: DEBUG nova.network.neutron [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Updating instance_info_cache with network_info: [{"id": "0d202452-add9-4ae7-b035-6554f287d049", "address": "fa:16:3e:3b:e0:ab", "network": {"id": "b24dd0c0-a394-4ca6-a79a-94535bc1df6f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2023102141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "607a0aa1049640d882d7dd490f5f98ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d202452-ad", "ovs_interfaceid": "0d202452-add9-4ae7-b035-6554f287d049", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2337.688503] env[62684]: INFO nova.compute.manager [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Took 17.53 seconds to build instance. [ 2337.696428] env[62684]: DEBUG oslo_concurrency.lockutils [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.680s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2337.698803] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.769s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2337.699069] env[62684]: DEBUG nova.objects.instance [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lazy-loading 'resources' on Instance uuid 587edf89-2ea0-4b89-8830-fa766b798398 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2337.714871] env[62684]: INFO nova.scheduler.client.report [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Deleted allocations for instance 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1 [ 2337.920953] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Releasing lock "refresh_cache-69d26980-f42d-4d35-8de3-a85d7a6f0a11" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2337.921243] env[62684]: DEBUG nova.objects.instance [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lazy-loading 'migration_context' on Instance uuid 69d26980-f42d-4d35-8de3-a85d7a6f0a11 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2338.191296] env[62684]: DEBUG oslo_concurrency.lockutils [None req-962f0cc2-3503-4e9f-acb5-c44dfd0f5c17 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "f7b61e23-fe0d-41fb-9100-d07cd8cb2d04" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.045s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2338.224450] env[62684]: DEBUG oslo_concurrency.lockutils [None req-14bb4336-6b2e-4564-a980-bcee7c1e8c1a tempest-ServersNegativeTestJSON-1876234319 tempest-ServersNegativeTestJSON-1876234319-project-member] Lock "9f1e9ae9-c082-4fbe-bd21-6e14e40962c1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.342s {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2338.301293] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b0e302f-5b7b-4cbb-b1e6-ae825a0278a7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2338.308907] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3763afa7-f5b3-4590-a1da-0ed98b74efb5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2338.338383] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69da9048-56be-46ac-bec4-99392f8c37da {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2338.345639] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc8d23cd-dea5-4fef-b8c8-9e6989f77f8d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2338.358896] env[62684]: DEBUG nova.compute.provider_tree [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2338.424449] env[62684]: DEBUG nova.objects.base [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Object Instance<69d26980-f42d-4d35-8de3-a85d7a6f0a11> lazy-loaded attributes: info_cache,migration_context {{(pid=62684) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2338.425582] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a697a0f3-b385-4349-9983-42af19aa2ce5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2338.446187] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2283fb84-4f8c-44d9-9dcf-19a0e4b327de {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2338.451264] env[62684]: DEBUG oslo_vmware.api [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2338.451264] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524d67d8-4d2a-c86e-532f-4272660747ad" [ 2338.451264] env[62684]: _type = "Task" [ 2338.451264] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2338.458725] env[62684]: DEBUG oslo_vmware.api [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524d67d8-4d2a-c86e-532f-4272660747ad, 'name': SearchDatastore_Task} progress is 0%. 
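The "compute_resources" lock messages above record how long each caller waited for the lock and how long it was held (e.g. "waited 0.769s", "held 1.680s"). A minimal sketch of that bookkeeping around a named lock, using only the standard library (this approximates the log format; it is not the oslo.concurrency lockutils code, and timed_lock is an invented name):

import threading
import time
from contextlib import contextmanager

_locks = {}                      # name -> threading.Lock (simplified registry)
_registry_guard = threading.Lock()

@contextmanager
def timed_lock(name, target):
    # Emit Acquiring / acquired-waited / released-held messages like the log.
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{target}"')
    t0 = time.monotonic()
    lock.acquire()
    t1 = time.monotonic()
    print(f'Lock "{name}" acquired by "{target}" :: waited {t1 - t0:.3f}s')
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{target}" :: held {time.monotonic() - t1:.3f}s')

if __name__ == "__main__":
    with timed_lock("compute_resources", "ResourceTracker.update_usage"):
        time.sleep(0.1)          # simulated critical section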
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2338.621982] env[62684]: DEBUG nova.compute.manager [req-2c354d40-3df3-4168-b369-eb2f684906f1 req-039e8f35-50d9-4a9a-91d1-7b3467507acb service nova] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Received event network-changed-7a61c618-de41-42f8-a8f4-ed5cf615ac72 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2338.622211] env[62684]: DEBUG nova.compute.manager [req-2c354d40-3df3-4168-b369-eb2f684906f1 req-039e8f35-50d9-4a9a-91d1-7b3467507acb service nova] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Refreshing instance network info cache due to event network-changed-7a61c618-de41-42f8-a8f4-ed5cf615ac72. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2338.622495] env[62684]: DEBUG oslo_concurrency.lockutils [req-2c354d40-3df3-4168-b369-eb2f684906f1 req-039e8f35-50d9-4a9a-91d1-7b3467507acb service nova] Acquiring lock "refresh_cache-f7b61e23-fe0d-41fb-9100-d07cd8cb2d04" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2338.622592] env[62684]: DEBUG oslo_concurrency.lockutils [req-2c354d40-3df3-4168-b369-eb2f684906f1 req-039e8f35-50d9-4a9a-91d1-7b3467507acb service nova] Acquired lock "refresh_cache-f7b61e23-fe0d-41fb-9100-d07cd8cb2d04" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2338.622744] env[62684]: DEBUG nova.network.neutron [req-2c354d40-3df3-4168-b369-eb2f684906f1 req-039e8f35-50d9-4a9a-91d1-7b3467507acb service nova] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Refreshing network info cache for port 7a61c618-de41-42f8-a8f4-ed5cf615ac72 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2338.864062] env[62684]: DEBUG nova.scheduler.client.report [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2338.961943] env[62684]: DEBUG oslo_vmware.api [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]524d67d8-4d2a-c86e-532f-4272660747ad, 'name': SearchDatastore_Task, 'duration_secs': 0.007228} completed successfully. 
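The recurring "Inventory has not changed for provider ... based on inventory data" lines amount to comparing the cached inventory mapping against the freshly computed one and skipping the placement update when they are equal. A toy sketch of that comparison, with the inventory values copied from the log (inventory_changed is an invented helper, not the scheduler report client):

CURRENT = {
    "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
             "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                  "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 155,
                "step_size": 1, "allocation_ratio": 1.0},
}

def inventory_changed(cached, proposed):
    # Plain dict equality is enough: same resource classes, same field values.
    return cached != proposed

if __name__ == "__main__":
    proposed = {rc: dict(fields) for rc, fields in CURRENT.items()}
    if inventory_changed(CURRENT, proposed):
        print("Updating inventory for provider")
    else:
        print("Inventory has not changed for provider; skipping update")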
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2338.962483] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2339.367217] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.668s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2339.369734] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.407s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2339.397396] env[62684]: INFO nova.scheduler.client.report [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Deleted allocations for instance 587edf89-2ea0-4b89-8830-fa766b798398 [ 2339.417931] env[62684]: DEBUG nova.network.neutron [req-2c354d40-3df3-4168-b369-eb2f684906f1 req-039e8f35-50d9-4a9a-91d1-7b3467507acb service nova] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Updated VIF entry in instance network info cache for port 7a61c618-de41-42f8-a8f4-ed5cf615ac72. 
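The "Updated VIF entry in instance network info cache for port ..." record that follows replaces one entry, keyed by port id, inside the cached list of VIF dicts. Conceptually that refresh looks like the sketch below (a toy version; the real cache is a Nova InstanceInfoCache object, and update_vif_entry is an invented name):

def update_vif_entry(network_info, refreshed_vif):
    # Replace (or append) the VIF whose 'id' matches refreshed_vif['id'].
    for index, vif in enumerate(network_info):
        if vif["id"] == refreshed_vif["id"]:
            network_info[index] = refreshed_vif
            return network_info
    network_info.append(refreshed_vif)
    return network_info

if __name__ == "__main__":
    cache = [{"id": "7a61c618-de41-42f8-a8f4-ed5cf615ac72", "active": False}]
    update_vif_entry(cache, {"id": "7a61c618-de41-42f8-a8f4-ed5cf615ac72",
                             "active": True, "devname": "tap7a61c618-de"})
    print(cache)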
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2339.418402] env[62684]: DEBUG nova.network.neutron [req-2c354d40-3df3-4168-b369-eb2f684906f1 req-039e8f35-50d9-4a9a-91d1-7b3467507acb service nova] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Updating instance_info_cache with network_info: [{"id": "7a61c618-de41-42f8-a8f4-ed5cf615ac72", "address": "fa:16:3e:41:aa:33", "network": {"id": "8136a664-f757-43b3-a2fa-bacdf2e9566c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1799567463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947e7359aaba456fa1763f4dc8e9d359", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cc30a16-f070-421c-964e-50c9aa32f17a", "external-id": "nsx-vlan-transportzone-424", "segmentation_id": 424, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a61c618-de", "ovs_interfaceid": "7a61c618-de41-42f8-a8f4-ed5cf615ac72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2339.908380] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6b5bbea8-7991-470e-a1d5-609de1bd8c68 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "587edf89-2ea0-4b89-8830-fa766b798398" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.228s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2339.920982] env[62684]: DEBUG oslo_concurrency.lockutils [req-2c354d40-3df3-4168-b369-eb2f684906f1 req-039e8f35-50d9-4a9a-91d1-7b3467507acb service nova] Releasing lock "refresh_cache-f7b61e23-fe0d-41fb-9100-d07cd8cb2d04" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2339.973455] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d147dec6-6881-482a-aef2-cbe5deee2dd3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2339.984731] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4049863-e878-4d2d-825b-0ac1ee4211d2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2340.021534] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee631f0-111d-4905-a6fc-c54e7ba75e5e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2340.029186] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b07f129-319e-4175-bc09-904aa144a517 {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2340.042780] env[62684]: DEBUG nova.compute.provider_tree [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2340.546692] env[62684]: DEBUG nova.scheduler.client.report [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2341.204514] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "b0ddbec0-d578-46df-93fd-9d38c939bd77" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2341.204771] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "b0ddbec0-d578-46df-93fd-9d38c939bd77" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2341.556773] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.187s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2341.707774] env[62684]: DEBUG nova.compute.manager [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2342.106812] env[62684]: INFO nova.scheduler.client.report [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Deleted allocation for migration 48d12785-9a2b-4109-9c3d-ce99edbcc23c [ 2342.226708] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2342.227049] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2342.228505] env[62684]: INFO nova.compute.claims [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2342.612462] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "69d26980-f42d-4d35-8de3-a85d7a6f0a11" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.931s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2343.315632] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d60bf6aa-1622-48a6-9de6-85a1107a51f8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.323468] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a25019f-eae5-4d63-a8b2-3626053fe8eb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.353692] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52c6d965-7de7-4083-88bb-07a08068d20e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.360625] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-009234e5-ba2d-4c28-83a9-c17d1d945aa4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.373507] env[62684]: DEBUG nova.compute.provider_tree [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2343.706396] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a 
tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "69d26980-f42d-4d35-8de3-a85d7a6f0a11" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2343.706743] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "69d26980-f42d-4d35-8de3-a85d7a6f0a11" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2343.707252] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock "69d26980-f42d-4d35-8de3-a85d7a6f0a11-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2343.707463] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "69d26980-f42d-4d35-8de3-a85d7a6f0a11-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2343.707641] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "69d26980-f42d-4d35-8de3-a85d7a6f0a11-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2343.709706] env[62684]: INFO nova.compute.manager [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Terminating instance [ 2343.711385] env[62684]: DEBUG nova.compute.manager [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2343.711581] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2343.712417] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1615826-2195-441a-a3d5-1d49ddd458eb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.719378] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2343.719594] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f17cb2f7-bf6c-4c0d-b8c5-fdca7c7c2979 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.725959] env[62684]: DEBUG oslo_vmware.api [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2343.725959] env[62684]: value = "task-2053801" [ 2343.725959] env[62684]: _type = "Task" [ 2343.725959] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2343.876471] env[62684]: DEBUG nova.scheduler.client.report [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2344.235469] env[62684]: DEBUG oslo_vmware.api [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053801, 'name': PowerOffVM_Task, 'duration_secs': 0.211468} completed successfully. 
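The delete of instance 69d26980 proceeds in a fixed order that the surrounding records spell out: power off the VM, unregister it from vCenter, then delete its datastore directory, each step waiting on its own task (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task). A compact sketch of that ordering with placeholder task calls (illustrative only; run_task and destroy_instance are invented names, not the vmops code):

import time

def run_task(name, duration=0.05):
    # Placeholder for submitting a vCenter task and waiting for it to finish.
    time.sleep(duration)
    print(f"{name} completed successfully")

def destroy_instance(instance_uuid, datastore):
    run_task("PowerOffVM_Task")
    run_task("UnregisterVM")          # removes the VM object, keeps its files
    print(f"Deleting the datastore file [{datastore}] {instance_uuid}")
    run_task("DeleteDatastoreFile_Task")
    print("Instance destroyed")

if __name__ == "__main__":
    destroy_instance("69d26980-f42d-4d35-8de3-a85d7a6f0a11", "datastore2")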
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2344.235680] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2344.235854] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2344.236133] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-501f5a21-80aa-40a4-b501-b6a7bc60b480 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.381575] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.154s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2344.382182] env[62684]: DEBUG nova.compute.manager [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2344.887128] env[62684]: DEBUG nova.compute.utils [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2344.888901] env[62684]: DEBUG nova.compute.manager [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Allocating IP information in the background. 
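"Allocating IP information in the background" followed shortly by "Start building block device mappings" shows the Neutron port allocation running concurrently with the rest of the build and being joined before spawn. A small sketch of that overlap using a thread pool (Nova itself uses greenthreads; the helper names and timings here are invented):

import time
from concurrent.futures import ThreadPoolExecutor

def allocate_network(instance_uuid):
    time.sleep(0.2)                  # stand-in for the Neutron port-create round trip
    return [{"id": "47cf42e1-cff0-4bcb-9b54-03051121bd6b", "active": True}]

def build_block_device_mappings(instance_uuid):
    time.sleep(0.1)
    return [{"device_name": "/dev/sda", "boot_index": 0}]

def build_instance(instance_uuid):
    with ThreadPoolExecutor(max_workers=1) as pool:
        nw_future = pool.submit(allocate_network, instance_uuid)
        bdms = build_block_device_mappings(instance_uuid)   # proceeds in parallel
        network_info = nw_future.result()                   # join before spawning
    print("spawn with", bdms, network_info)

if __name__ == "__main__":
    build_instance("b0ddbec0-d578-46df-93fd-9d38c939bd77")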
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2344.889100] env[62684]: DEBUG nova.network.neutron [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2344.934602] env[62684]: DEBUG nova.policy [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2fab3230b61d440e93d1d0a975115405', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '27d04006afc747e19ad87238bfdbaad1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2345.165157] env[62684]: DEBUG nova.network.neutron [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Successfully created port: 47cf42e1-cff0-4bcb-9b54-03051121bd6b {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2345.392286] env[62684]: DEBUG nova.compute.manager [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2346.401713] env[62684]: DEBUG nova.compute.manager [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2346.427480] env[62684]: DEBUG nova.virt.hardware [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2346.427750] env[62684]: DEBUG nova.virt.hardware [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2346.427917] env[62684]: DEBUG nova.virt.hardware [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2346.428125] env[62684]: DEBUG nova.virt.hardware [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2346.428279] env[62684]: DEBUG nova.virt.hardware [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2346.428433] env[62684]: DEBUG nova.virt.hardware [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2346.428645] env[62684]: DEBUG nova.virt.hardware [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2346.428809] env[62684]: DEBUG nova.virt.hardware [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2346.428979] env[62684]: DEBUG nova.virt.hardware [None 
req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2346.429160] env[62684]: DEBUG nova.virt.hardware [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2346.429338] env[62684]: DEBUG nova.virt.hardware [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2346.430258] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec75203-f1c7-43b3-874d-9168afe013f7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2346.437918] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09046659-3f61-4a57-b4f9-3aa025a9f6b1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.682339] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2348.682629] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2348.682779] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Deleting the datastore file [datastore2] 69d26980-f42d-4d35-8de3-a85d7a6f0a11 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2348.683084] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1f7ac414-ccdd-475a-9d94-c902ccf57bc7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.689661] env[62684]: DEBUG oslo_vmware.api [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for the task: (returnval){ [ 2348.689661] env[62684]: value = "task-2053803" [ 2348.689661] env[62684]: _type = "Task" [ 2348.689661] env[62684]: } to complete. 
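The nova.virt.hardware records above walk from the flavor/image limits (sockets=65536, cores=65536, threads=65536) to "Build topologies for 1 vcpu(s) 1:1:1" and a single possible topology (cores=1, sockets=1, threads=1). The enumeration they describe is essentially: list every (sockets, cores, threads) triple whose product equals the vCPU count and fits the limits. A brute-force sketch of that idea (possible_topologies is an invented name, not the hardware.py function):

from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Every factorization of vcpus into sockets * cores * threads within limits.
    tops = []
    for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                           range(1, min(vcpus, max_cores) + 1),
                           range(1, min(vcpus, max_threads) + 1)):
        if s * c * t == vcpus:
            tops.append((s, c, t))
    return tops

if __name__ == "__main__":
    # For the 1-vCPU m1.nano flavor in the log, only (1, 1, 1) is possible.
    print(possible_topologies(1))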
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2348.698173] env[62684]: DEBUG oslo_vmware.api [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053803, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2349.199047] env[62684]: DEBUG oslo_vmware.api [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Task: {'id': task-2053803, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146637} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2349.199319] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2349.199513] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2349.199690] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2349.199875] env[62684]: INFO nova.compute.manager [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Took 5.49 seconds to destroy the instance on the hypervisor. [ 2349.200143] env[62684]: DEBUG oslo.service.loopingcall [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
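"Waiting for function ... _deallocate_network_with_retries to return" indicates the network teardown is wrapped in a retrying loop driven by a looping call. A bare-bones retry loop conveying the same shape (this is not the oslo.service loopingcall API; retry, the attempt count and the placeholder failure are all invented for the example):

import time

_calls = {"n": 0}

def deallocate_network():
    # Placeholder for the Neutron cleanup; fails once to show the retry path.
    _calls["n"] += 1
    if _calls["n"] == 1:
        raise ConnectionError("neutron temporarily unreachable")
    print("Deallocating network for instance")

def retry(func, attempts=3, interval=0.5):
    # Call func until it succeeds or the attempts are exhausted.
    for attempt in range(1, attempts + 1):
        try:
            return func()
        except Exception as exc:
            if attempt == attempts:
                raise
            print(f"attempt {attempt} failed ({exc}); retrying in {interval}s")
            time.sleep(interval)

if __name__ == "__main__":
    retry(deallocate_network)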
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2349.200344] env[62684]: DEBUG nova.compute.manager [-] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2349.200438] env[62684]: DEBUG nova.network.neutron [-] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2349.500384] env[62684]: DEBUG nova.compute.manager [req-3c0e1bb3-3300-4e8b-bdf4-114cda592edc req-13c0f63c-2ac9-4cb0-8ddc-80b96ff309b4 service nova] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Received event network-vif-deleted-0d202452-add9-4ae7-b035-6554f287d049 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2349.500594] env[62684]: INFO nova.compute.manager [req-3c0e1bb3-3300-4e8b-bdf4-114cda592edc req-13c0f63c-2ac9-4cb0-8ddc-80b96ff309b4 service nova] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Neutron deleted interface 0d202452-add9-4ae7-b035-6554f287d049; detaching it from the instance and deleting it from the info cache [ 2349.500788] env[62684]: DEBUG nova.network.neutron [req-3c0e1bb3-3300-4e8b-bdf4-114cda592edc req-13c0f63c-2ac9-4cb0-8ddc-80b96ff309b4 service nova] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2349.674181] env[62684]: DEBUG nova.network.neutron [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Successfully updated port: 47cf42e1-cff0-4bcb-9b54-03051121bd6b {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2349.972838] env[62684]: DEBUG nova.network.neutron [-] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2350.004603] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d0cb55c6-87c8-4a78-9108-8c80b7ab3302 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2350.013535] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf8268ba-d00c-488a-9db5-481bbc6c6697 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2350.040660] env[62684]: DEBUG nova.compute.manager [req-3c0e1bb3-3300-4e8b-bdf4-114cda592edc req-13c0f63c-2ac9-4cb0-8ddc-80b96ff309b4 service nova] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Detach interface failed, port_id=0d202452-add9-4ae7-b035-6554f287d049, reason: Instance 69d26980-f42d-4d35-8de3-a85d7a6f0a11 could not be found. 
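The "Detach interface failed, port_id=..., reason: Instance ... could not be found" record shows a deliberate race tolerance: the network-vif-deleted event arrived after the instance had already been destroyed, and the handler logs the miss instead of raising. A minimal sketch of that best-effort handling (class, dict and function names are invented; only the behaviour mirrors the log):

class InstanceNotFound(Exception):
    pass

INSTANCES = {}   # uuid -> instance record; empty because the VM is already gone

def detach_interface(instance_uuid, port_id):
    if instance_uuid not in INSTANCES:
        raise InstanceNotFound(f"Instance {instance_uuid} could not be found.")
    INSTANCES[instance_uuid]["ports"].remove(port_id)

def handle_network_vif_deleted(instance_uuid, port_id):
    # Best-effort cleanup: a missing instance is logged, not treated as fatal.
    try:
        detach_interface(instance_uuid, port_id)
    except InstanceNotFound as exc:
        print(f"Detach interface failed, port_id={port_id}, reason: {exc}")

if __name__ == "__main__":
    handle_network_vif_deleted("69d26980-f42d-4d35-8de3-a85d7a6f0a11",
                               "0d202452-add9-4ae7-b035-6554f287d049")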
{{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2350.176695] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "refresh_cache-b0ddbec0-d578-46df-93fd-9d38c939bd77" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2350.176857] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquired lock "refresh_cache-b0ddbec0-d578-46df-93fd-9d38c939bd77" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2350.177059] env[62684]: DEBUG nova.network.neutron [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2350.476168] env[62684]: INFO nova.compute.manager [-] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Took 1.28 seconds to deallocate network for instance. [ 2350.714223] env[62684]: DEBUG nova.network.neutron [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2350.839120] env[62684]: DEBUG nova.network.neutron [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Updating instance_info_cache with network_info: [{"id": "47cf42e1-cff0-4bcb-9b54-03051121bd6b", "address": "fa:16:3e:77:e7:e9", "network": {"id": "e177c6d0-ddd5-4029-94af-c8f1b937dd9f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1344612161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27d04006afc747e19ad87238bfdbaad1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47cf42e1-cf", "ovs_interfaceid": "47cf42e1-cff0-4bcb-9b54-03051121bd6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2350.984327] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2350.984609] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2350.984803] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2351.008910] env[62684]: INFO nova.scheduler.client.report [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Deleted allocations for instance 69d26980-f42d-4d35-8de3-a85d7a6f0a11 [ 2351.341667] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Releasing lock "refresh_cache-b0ddbec0-d578-46df-93fd-9d38c939bd77" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2351.342023] env[62684]: DEBUG nova.compute.manager [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Instance network_info: |[{"id": "47cf42e1-cff0-4bcb-9b54-03051121bd6b", "address": "fa:16:3e:77:e7:e9", "network": {"id": "e177c6d0-ddd5-4029-94af-c8f1b937dd9f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1344612161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27d04006afc747e19ad87238bfdbaad1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47cf42e1-cf", "ovs_interfaceid": "47cf42e1-cff0-4bcb-9b54-03051121bd6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2351.342518] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:77:e7:e9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '171aeae0-6a27-44fc-bc3d-a2d5581fc702', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '47cf42e1-cff0-4bcb-9b54-03051121bd6b', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2351.350217] env[62684]: DEBUG oslo.service.loopingcall [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2351.350433] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2351.350663] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bfc0cf51-5c83-4965-8b33-f2e29c637ee0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2351.371041] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2351.371041] env[62684]: value = "task-2053804" [ 2351.371041] env[62684]: _type = "Task" [ 2351.371041] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2351.379047] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053804, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2351.516946] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bb2ed665-a10b-4095-bc59-ed367112bc4a tempest-DeleteServersTestJSON-370305399 tempest-DeleteServersTestJSON-370305399-project-member] Lock "69d26980-f42d-4d35-8de3-a85d7a6f0a11" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.810s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2351.528586] env[62684]: DEBUG nova.compute.manager [req-55e525a9-19cd-4780-b153-ddc8d84f1158 req-1c322dfa-668f-494d-a2c8-52c24b0e5f54 service nova] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Received event network-vif-plugged-47cf42e1-cff0-4bcb-9b54-03051121bd6b {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2351.528840] env[62684]: DEBUG oslo_concurrency.lockutils [req-55e525a9-19cd-4780-b153-ddc8d84f1158 req-1c322dfa-668f-494d-a2c8-52c24b0e5f54 service nova] Acquiring lock "b0ddbec0-d578-46df-93fd-9d38c939bd77-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2351.529121] env[62684]: DEBUG oslo_concurrency.lockutils [req-55e525a9-19cd-4780-b153-ddc8d84f1158 req-1c322dfa-668f-494d-a2c8-52c24b0e5f54 service nova] Lock "b0ddbec0-d578-46df-93fd-9d38c939bd77-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2351.529291] env[62684]: DEBUG oslo_concurrency.lockutils [req-55e525a9-19cd-4780-b153-ddc8d84f1158 req-1c322dfa-668f-494d-a2c8-52c24b0e5f54 service nova] Lock 
"b0ddbec0-d578-46df-93fd-9d38c939bd77-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2351.529431] env[62684]: DEBUG nova.compute.manager [req-55e525a9-19cd-4780-b153-ddc8d84f1158 req-1c322dfa-668f-494d-a2c8-52c24b0e5f54 service nova] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] No waiting events found dispatching network-vif-plugged-47cf42e1-cff0-4bcb-9b54-03051121bd6b {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2351.529653] env[62684]: WARNING nova.compute.manager [req-55e525a9-19cd-4780-b153-ddc8d84f1158 req-1c322dfa-668f-494d-a2c8-52c24b0e5f54 service nova] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Received unexpected event network-vif-plugged-47cf42e1-cff0-4bcb-9b54-03051121bd6b for instance with vm_state building and task_state spawning. [ 2351.529755] env[62684]: DEBUG nova.compute.manager [req-55e525a9-19cd-4780-b153-ddc8d84f1158 req-1c322dfa-668f-494d-a2c8-52c24b0e5f54 service nova] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Received event network-changed-47cf42e1-cff0-4bcb-9b54-03051121bd6b {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2351.529911] env[62684]: DEBUG nova.compute.manager [req-55e525a9-19cd-4780-b153-ddc8d84f1158 req-1c322dfa-668f-494d-a2c8-52c24b0e5f54 service nova] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Refreshing instance network info cache due to event network-changed-47cf42e1-cff0-4bcb-9b54-03051121bd6b. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2351.530245] env[62684]: DEBUG oslo_concurrency.lockutils [req-55e525a9-19cd-4780-b153-ddc8d84f1158 req-1c322dfa-668f-494d-a2c8-52c24b0e5f54 service nova] Acquiring lock "refresh_cache-b0ddbec0-d578-46df-93fd-9d38c939bd77" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2351.530414] env[62684]: DEBUG oslo_concurrency.lockutils [req-55e525a9-19cd-4780-b153-ddc8d84f1158 req-1c322dfa-668f-494d-a2c8-52c24b0e5f54 service nova] Acquired lock "refresh_cache-b0ddbec0-d578-46df-93fd-9d38c939bd77" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2351.530580] env[62684]: DEBUG nova.network.neutron [req-55e525a9-19cd-4780-b153-ddc8d84f1158 req-1c322dfa-668f-494d-a2c8-52c24b0e5f54 service nova] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Refreshing network info cache for port 47cf42e1-cff0-4bcb-9b54-03051121bd6b {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2351.882421] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053804, 'name': CreateVM_Task, 'duration_secs': 0.34823} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2351.882621] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2351.883252] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2351.883428] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2351.883818] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2351.884090] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5743cfc4-a02b-4594-9d68-52169f356f49 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2351.888840] env[62684]: DEBUG oslo_vmware.api [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2351.888840] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bdc039-4fac-cc03-05b6-642e86452c29" [ 2351.888840] env[62684]: _type = "Task" [ 2351.888840] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2351.896061] env[62684]: DEBUG oslo_vmware.api [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bdc039-4fac-cc03-05b6-642e86452c29, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2352.218810] env[62684]: DEBUG nova.network.neutron [req-55e525a9-19cd-4780-b153-ddc8d84f1158 req-1c322dfa-668f-494d-a2c8-52c24b0e5f54 service nova] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Updated VIF entry in instance network info cache for port 47cf42e1-cff0-4bcb-9b54-03051121bd6b. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2352.219303] env[62684]: DEBUG nova.network.neutron [req-55e525a9-19cd-4780-b153-ddc8d84f1158 req-1c322dfa-668f-494d-a2c8-52c24b0e5f54 service nova] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Updating instance_info_cache with network_info: [{"id": "47cf42e1-cff0-4bcb-9b54-03051121bd6b", "address": "fa:16:3e:77:e7:e9", "network": {"id": "e177c6d0-ddd5-4029-94af-c8f1b937dd9f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1344612161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27d04006afc747e19ad87238bfdbaad1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47cf42e1-cf", "ovs_interfaceid": "47cf42e1-cff0-4bcb-9b54-03051121bd6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2352.398654] env[62684]: DEBUG oslo_vmware.api [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bdc039-4fac-cc03-05b6-642e86452c29, 'name': SearchDatastore_Task, 'duration_secs': 0.010081} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2352.398924] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2352.399188] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2352.399430] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2352.399582] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2352.399762] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2352.400039] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e0e6cb2d-0b06-4986-9bb5-0e12f6641607 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.407246] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2352.407421] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2352.408083] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aeb8fdee-c8e1-4e7a-8a03-f989d1f14e6d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.412833] env[62684]: DEBUG oslo_vmware.api [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2352.412833] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e70a21-8440-cbd5-94c4-e9cb0212bc75" [ 2352.412833] env[62684]: _type = "Task" [ 2352.412833] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2352.419767] env[62684]: DEBUG oslo_vmware.api [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e70a21-8440-cbd5-94c4-e9cb0212bc75, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2352.721971] env[62684]: DEBUG oslo_concurrency.lockutils [req-55e525a9-19cd-4780-b153-ddc8d84f1158 req-1c322dfa-668f-494d-a2c8-52c24b0e5f54 service nova] Releasing lock "refresh_cache-b0ddbec0-d578-46df-93fd-9d38c939bd77" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2352.922885] env[62684]: DEBUG oslo_vmware.api [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e70a21-8440-cbd5-94c4-e9cb0212bc75, 'name': SearchDatastore_Task, 'duration_secs': 0.007961} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2352.923687] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7827a345-b83a-4723-a3d3-614d7e3ba59a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.928641] env[62684]: DEBUG oslo_vmware.api [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2352.928641] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a85000-619d-db94-0a9e-7f7243e0d5af" [ 2352.928641] env[62684]: _type = "Task" [ 2352.928641] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2352.936983] env[62684]: DEBUG oslo_vmware.api [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a85000-619d-db94-0a9e-7f7243e0d5af, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2353.439206] env[62684]: DEBUG oslo_vmware.api [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a85000-619d-db94-0a9e-7f7243e0d5af, 'name': SearchDatastore_Task, 'duration_secs': 0.010047} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2353.439552] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2353.439785] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] b0ddbec0-d578-46df-93fd-9d38c939bd77/b0ddbec0-d578-46df-93fd-9d38c939bd77.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2353.440073] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ffadafcb-ab78-4839-ad95-dfe8a35fb317 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.447254] env[62684]: DEBUG oslo_vmware.api [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2353.447254] env[62684]: value = "task-2053806" [ 2353.447254] env[62684]: _type = "Task" [ 2353.447254] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2353.454804] env[62684]: DEBUG oslo_vmware.api [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053806, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2353.957667] env[62684]: DEBUG oslo_vmware.api [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053806, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.438585} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2353.958055] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] b0ddbec0-d578-46df-93fd-9d38c939bd77/b0ddbec0-d578-46df-93fd-9d38c939bd77.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2353.958378] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2353.958697] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-64876da5-5a99-45c9-b042-ca0800b4947d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.965398] env[62684]: DEBUG oslo_vmware.api [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2353.965398] env[62684]: value = "task-2053807" [ 2353.965398] env[62684]: _type = "Task" [ 2353.965398] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2353.972729] env[62684]: DEBUG oslo_vmware.api [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053807, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2354.475679] env[62684]: DEBUG oslo_vmware.api [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053807, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06566} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2354.476032] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2354.476655] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-679ba2a5-b5af-49d1-afce-adeb70b1a633 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.497487] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Reconfiguring VM instance instance-00000072 to attach disk [datastore2] b0ddbec0-d578-46df-93fd-9d38c939bd77/b0ddbec0-d578-46df-93fd-9d38c939bd77.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2354.497721] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-103b0928-efa0-45eb-a957-3100e334d6fc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.516122] env[62684]: DEBUG oslo_vmware.api [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2354.516122] env[62684]: value = "task-2053808" [ 2354.516122] env[62684]: _type = "Task" [ 2354.516122] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2354.524657] env[62684]: DEBUG oslo_vmware.api [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053808, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2355.025920] env[62684]: DEBUG oslo_vmware.api [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053808, 'name': ReconfigVM_Task, 'duration_secs': 0.292296} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2355.026204] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Reconfigured VM instance instance-00000072 to attach disk [datastore2] b0ddbec0-d578-46df-93fd-9d38c939bd77/b0ddbec0-d578-46df-93fd-9d38c939bd77.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2355.026804] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a6538bc2-63d0-468c-ba2c-be6894a73d57 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2355.033083] env[62684]: DEBUG oslo_vmware.api [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2355.033083] env[62684]: value = "task-2053809" [ 2355.033083] env[62684]: _type = "Task" [ 2355.033083] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2355.040528] env[62684]: DEBUG oslo_vmware.api [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053809, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2355.542973] env[62684]: DEBUG oslo_vmware.api [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053809, 'name': Rename_Task, 'duration_secs': 0.153891} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2355.543354] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2355.543504] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0dba4dd1-819d-453b-8808-69c23e6deceb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2355.550156] env[62684]: DEBUG oslo_vmware.api [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2355.550156] env[62684]: value = "task-2053810" [ 2355.550156] env[62684]: _type = "Task" [ 2355.550156] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2355.557367] env[62684]: DEBUG oslo_vmware.api [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053810, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2356.060910] env[62684]: DEBUG oslo_vmware.api [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053810, 'name': PowerOnVM_Task, 'duration_secs': 0.430657} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2356.060910] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2356.061098] env[62684]: INFO nova.compute.manager [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Took 9.66 seconds to spawn the instance on the hypervisor. [ 2356.061205] env[62684]: DEBUG nova.compute.manager [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2356.061992] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-317638b7-005f-45c9-af3c-f6a8ec00a8b0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.580685] env[62684]: INFO nova.compute.manager [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Took 14.37 seconds to build instance. [ 2357.083315] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5e1df5f4-600b-44fd-932c-bac0d2d248b0 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "b0ddbec0-d578-46df-93fd-9d38c939bd77" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.878s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2357.660284] env[62684]: DEBUG nova.compute.manager [req-accbafe5-511a-4be6-9424-d3b2ae6825bf req-9180d93a-08e0-419b-a860-06fe9e59cfb0 service nova] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Received event network-changed-47cf42e1-cff0-4bcb-9b54-03051121bd6b {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2357.660493] env[62684]: DEBUG nova.compute.manager [req-accbafe5-511a-4be6-9424-d3b2ae6825bf req-9180d93a-08e0-419b-a860-06fe9e59cfb0 service nova] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Refreshing instance network info cache due to event network-changed-47cf42e1-cff0-4bcb-9b54-03051121bd6b. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2357.660713] env[62684]: DEBUG oslo_concurrency.lockutils [req-accbafe5-511a-4be6-9424-d3b2ae6825bf req-9180d93a-08e0-419b-a860-06fe9e59cfb0 service nova] Acquiring lock "refresh_cache-b0ddbec0-d578-46df-93fd-9d38c939bd77" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2357.661022] env[62684]: DEBUG oslo_concurrency.lockutils [req-accbafe5-511a-4be6-9424-d3b2ae6825bf req-9180d93a-08e0-419b-a860-06fe9e59cfb0 service nova] Acquired lock "refresh_cache-b0ddbec0-d578-46df-93fd-9d38c939bd77" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2357.661458] env[62684]: DEBUG nova.network.neutron [req-accbafe5-511a-4be6-9424-d3b2ae6825bf req-9180d93a-08e0-419b-a860-06fe9e59cfb0 service nova] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Refreshing network info cache for port 47cf42e1-cff0-4bcb-9b54-03051121bd6b {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2358.427454] env[62684]: DEBUG nova.network.neutron [req-accbafe5-511a-4be6-9424-d3b2ae6825bf req-9180d93a-08e0-419b-a860-06fe9e59cfb0 service nova] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Updated VIF entry in instance network info cache for port 47cf42e1-cff0-4bcb-9b54-03051121bd6b. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2358.427953] env[62684]: DEBUG nova.network.neutron [req-accbafe5-511a-4be6-9424-d3b2ae6825bf req-9180d93a-08e0-419b-a860-06fe9e59cfb0 service nova] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Updating instance_info_cache with network_info: [{"id": "47cf42e1-cff0-4bcb-9b54-03051121bd6b", "address": "fa:16:3e:77:e7:e9", "network": {"id": "e177c6d0-ddd5-4029-94af-c8f1b937dd9f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1344612161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27d04006afc747e19ad87238bfdbaad1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47cf42e1-cf", "ovs_interfaceid": "47cf42e1-cff0-4bcb-9b54-03051121bd6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2358.641567] env[62684]: DEBUG oslo_concurrency.lockutils [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquiring lock "ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2358.641825] env[62684]: DEBUG oslo_concurrency.lockutils [None 
req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2358.931579] env[62684]: DEBUG oslo_concurrency.lockutils [req-accbafe5-511a-4be6-9424-d3b2ae6825bf req-9180d93a-08e0-419b-a860-06fe9e59cfb0 service nova] Releasing lock "refresh_cache-b0ddbec0-d578-46df-93fd-9d38c939bd77" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2358.986582] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquiring lock "d540b43f-5bf9-47df-b319-97a1bae7ffc0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2358.986832] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "d540b43f-5bf9-47df-b319-97a1bae7ffc0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2359.144529] env[62684]: DEBUG nova.compute.manager [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2359.327431] env[62684]: DEBUG oslo_concurrency.lockutils [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquiring lock "7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2359.327760] env[62684]: DEBUG oslo_concurrency.lockutils [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2359.489382] env[62684]: DEBUG nova.compute.manager [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2359.666336] env[62684]: DEBUG oslo_concurrency.lockutils [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2359.666576] env[62684]: DEBUG oslo_concurrency.lockutils [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2359.668046] env[62684]: INFO nova.compute.claims [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2359.830385] env[62684]: DEBUG nova.compute.manager [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2360.009151] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2360.350976] env[62684]: DEBUG oslo_concurrency.lockutils [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2360.771705] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b4326d-248c-4e23-90ca-43cbcb4849ed {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.779861] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afe1afe1-9fcb-487c-8c85-0fcf585ca11c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.810041] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2e86d0-a71b-4d5c-8cb5-f99170c70388 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.816990] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6df11d7-6ca4-4365-83b2-0124ccc8dbb9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
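Editor's note (illustrative, not part of the captured log): the resource claims above are granted against the provider inventory reported in the next entry. As a minimal sketch, assuming the standard Placement convention that usable capacity is (total - reserved) * allocation_ratio, the figures below are taken directly from the logged inventory data; the script and its output are illustrative only and are not Nova code.

    # Illustrative sketch: derive the capacity Placement exposes from the
    # inventory dict that appears in the log entry for provider
    # c23c281e-ec1f-4876-972e-a98655f2084f. Assumes the documented formula
    # capacity = (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1, 'max_unit': 16,    'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'min_unit': 1, 'max_unit': 155,   'step_size': 1, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # min_unit/max_unit/step_size bound what a single allocation may
        # request; they do not change the aggregate capacity.
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(f"{rc}: usable capacity {capacity} "
              f"(per-allocation {inv['min_unit']}..{inv['max_unit']}, step {inv['step_size']})")

    # Expected output for the logged values:
    #   VCPU: usable capacity 192 (per-allocation 1..16, step 1)
    #   MEMORY_MB: usable capacity 196078 (per-allocation 1..65530, step 1)
    #   DISK_GB: usable capacity 400 (per-allocation 1..155, step 1)

The surrounding "compute_resources" lock messages show the ResourceTracker serializing these claims, which is why the later instance_claim entries report non-zero waited times (e.g. 1.833s and 3.669s) while each claim itself holds the lock for roughly two seconds.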
[ 2360.829720] env[62684]: DEBUG nova.compute.provider_tree [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2361.333020] env[62684]: DEBUG nova.scheduler.client.report [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2361.838441] env[62684]: DEBUG oslo_concurrency.lockutils [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.172s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2361.839015] env[62684]: DEBUG nova.compute.manager [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2361.841726] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.833s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2361.843606] env[62684]: INFO nova.compute.claims [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2362.348286] env[62684]: DEBUG nova.compute.utils [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2362.349728] env[62684]: DEBUG nova.compute.manager [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2362.349896] env[62684]: DEBUG nova.network.neutron [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2362.390999] env[62684]: DEBUG nova.policy [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13323ce0378a444a8795f28472136586', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2c84f03a79784aa38a063e874dcc2c0a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2362.629162] env[62684]: DEBUG nova.network.neutron [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Successfully created port: b5464c95-056f-4730-8974-808275aae0f2 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2362.853318] env[62684]: DEBUG nova.compute.manager [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2362.949592] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebe8e976-2cd8-4ec4-8302-7fed8da39601 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.957607] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab6193d-86ab-4d6b-91a1-26085745fab6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.988189] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc95f2ff-39c0-46a2-a679-ed2e0ac71c2f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.995326] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3fe7df3-5832-475d-a091-b10919aa4760 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2363.008588] env[62684]: DEBUG nova.compute.provider_tree [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2363.511782] env[62684]: DEBUG nova.scheduler.client.report [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2363.864806] env[62684]: DEBUG nova.compute.manager [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2363.890957] env[62684]: DEBUG nova.virt.hardware [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2363.891272] env[62684]: DEBUG nova.virt.hardware [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2363.891437] env[62684]: DEBUG nova.virt.hardware [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2363.891626] env[62684]: DEBUG nova.virt.hardware [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2363.891777] env[62684]: DEBUG nova.virt.hardware [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2363.891931] env[62684]: DEBUG nova.virt.hardware [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2363.892159] env[62684]: DEBUG nova.virt.hardware [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2363.892331] env[62684]: DEBUG nova.virt.hardware [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2363.892504] 
env[62684]: DEBUG nova.virt.hardware [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2363.892678] env[62684]: DEBUG nova.virt.hardware [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2363.892873] env[62684]: DEBUG nova.virt.hardware [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2363.893796] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f6b4897-257d-47eb-a371-5dc1eaf33741 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2363.901976] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1825177d-99e0-4bfd-880d-28b65b89f185 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2364.016585] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.175s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2364.017157] env[62684]: DEBUG nova.compute.manager [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2364.019882] env[62684]: DEBUG oslo_concurrency.lockutils [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.669s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2364.021365] env[62684]: INFO nova.compute.claims [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2364.526015] env[62684]: DEBUG nova.compute.utils [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2364.529412] env[62684]: DEBUG nova.compute.manager [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2364.529567] env[62684]: DEBUG nova.network.neutron [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2364.582493] env[62684]: DEBUG nova.policy [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13323ce0378a444a8795f28472136586', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2c84f03a79784aa38a063e874dcc2c0a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2364.824518] env[62684]: DEBUG nova.network.neutron [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Successfully created port: 664c9b72-448f-42d5-bb01-db8d2006bcb8 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2365.030065] env[62684]: DEBUG nova.compute.manager [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2365.149363] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0126d02-02f7-4800-b3c4-7708365fe54b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.157339] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4538f6d6-d317-4e02-af19-d7f90399242e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.187833] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92aaccde-3460-466b-a86c-30ae033e7c0b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.195189] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90f796fe-31ae-4ee8-b247-28d606446259 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.210458] env[62684]: DEBUG nova.compute.provider_tree [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2365.714787] env[62684]: DEBUG nova.scheduler.client.report [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2366.038837] env[62684]: DEBUG nova.compute.manager [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2366.065042] env[62684]: DEBUG nova.virt.hardware [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2366.065199] env[62684]: DEBUG nova.virt.hardware [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2366.065249] env[62684]: DEBUG nova.virt.hardware [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2366.065436] env[62684]: DEBUG nova.virt.hardware [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2366.065591] env[62684]: DEBUG nova.virt.hardware [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2366.065743] env[62684]: DEBUG nova.virt.hardware [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2366.065958] env[62684]: DEBUG nova.virt.hardware [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2366.066137] env[62684]: DEBUG nova.virt.hardware [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2366.066313] 
env[62684]: DEBUG nova.virt.hardware [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2366.066486] env[62684]: DEBUG nova.virt.hardware [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2366.066665] env[62684]: DEBUG nova.virt.hardware [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2366.067550] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f85a250f-e791-4e3d-99a0-e9411f56ea11 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2366.075403] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ce6b82c-5f78-4f95-a6a7-99b33a7325c7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2366.219454] env[62684]: DEBUG oslo_concurrency.lockutils [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.199s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2366.220061] env[62684]: DEBUG nova.compute.manager [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2366.724703] env[62684]: DEBUG nova.compute.utils [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2366.726165] env[62684]: DEBUG nova.compute.manager [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2366.726371] env[62684]: DEBUG nova.network.neutron [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2366.771221] env[62684]: DEBUG nova.policy [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13323ce0378a444a8795f28472136586', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2c84f03a79784aa38a063e874dcc2c0a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2366.999154] env[62684]: DEBUG nova.network.neutron [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Successfully created port: dee84b05-71ef-4a8b-8cfa-64eea8bf277e {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2367.229345] env[62684]: DEBUG nova.compute.manager [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2368.241117] env[62684]: DEBUG nova.compute.manager [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2368.269707] env[62684]: DEBUG nova.virt.hardware [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2368.270154] env[62684]: DEBUG nova.virt.hardware [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2368.270471] env[62684]: DEBUG nova.virt.hardware [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2368.270811] env[62684]: DEBUG nova.virt.hardware [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2368.271113] env[62684]: DEBUG nova.virt.hardware [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2368.271406] env[62684]: DEBUG nova.virt.hardware [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2368.271765] env[62684]: DEBUG nova.virt.hardware [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2368.272054] env[62684]: DEBUG nova.virt.hardware [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2368.272352] 
env[62684]: DEBUG nova.virt.hardware [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2368.272656] env[62684]: DEBUG nova.virt.hardware [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2368.272945] env[62684]: DEBUG nova.virt.hardware [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2368.274262] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-904f48c3-9d56-471d-a6f3-8079bd64fbcf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.286156] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3fd7ec7-f0f2-443d-bbe0-2984da5a9748 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2370.005212] env[62684]: DEBUG nova.compute.manager [req-7a0fadc2-0904-4b2e-a8b3-2f52c23d3dc1 req-588f6a31-0b4c-476f-be3a-151ebc0483ee service nova] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Received event network-vif-plugged-b5464c95-056f-4730-8974-808275aae0f2 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2370.005507] env[62684]: DEBUG oslo_concurrency.lockutils [req-7a0fadc2-0904-4b2e-a8b3-2f52c23d3dc1 req-588f6a31-0b4c-476f-be3a-151ebc0483ee service nova] Acquiring lock "ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2370.005735] env[62684]: DEBUG oslo_concurrency.lockutils [req-7a0fadc2-0904-4b2e-a8b3-2f52c23d3dc1 req-588f6a31-0b4c-476f-be3a-151ebc0483ee service nova] Lock "ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2370.005940] env[62684]: DEBUG oslo_concurrency.lockutils [req-7a0fadc2-0904-4b2e-a8b3-2f52c23d3dc1 req-588f6a31-0b4c-476f-be3a-151ebc0483ee service nova] Lock "ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2370.006078] env[62684]: DEBUG nova.compute.manager [req-7a0fadc2-0904-4b2e-a8b3-2f52c23d3dc1 req-588f6a31-0b4c-476f-be3a-151ebc0483ee service nova] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] No waiting events found dispatching network-vif-plugged-b5464c95-056f-4730-8974-808275aae0f2 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2370.006253] env[62684]: WARNING 
nova.compute.manager [req-7a0fadc2-0904-4b2e-a8b3-2f52c23d3dc1 req-588f6a31-0b4c-476f-be3a-151ebc0483ee service nova] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Received unexpected event network-vif-plugged-b5464c95-056f-4730-8974-808275aae0f2 for instance with vm_state building and task_state spawning. [ 2370.102811] env[62684]: DEBUG nova.network.neutron [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Successfully updated port: b5464c95-056f-4730-8974-808275aae0f2 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2370.300288] env[62684]: DEBUG nova.network.neutron [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Successfully updated port: 664c9b72-448f-42d5-bb01-db8d2006bcb8 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2370.368635] env[62684]: DEBUG nova.compute.manager [req-4cb7e5e9-2de3-4f4c-aff9-92a72cd3cbe9 req-09734b7b-87f4-4b43-bad3-67f047fb74a8 service nova] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Received event network-vif-plugged-dee84b05-71ef-4a8b-8cfa-64eea8bf277e {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2370.368849] env[62684]: DEBUG oslo_concurrency.lockutils [req-4cb7e5e9-2de3-4f4c-aff9-92a72cd3cbe9 req-09734b7b-87f4-4b43-bad3-67f047fb74a8 service nova] Acquiring lock "7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2370.369076] env[62684]: DEBUG oslo_concurrency.lockutils [req-4cb7e5e9-2de3-4f4c-aff9-92a72cd3cbe9 req-09734b7b-87f4-4b43-bad3-67f047fb74a8 service nova] Lock "7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2370.369253] env[62684]: DEBUG oslo_concurrency.lockutils [req-4cb7e5e9-2de3-4f4c-aff9-92a72cd3cbe9 req-09734b7b-87f4-4b43-bad3-67f047fb74a8 service nova] Lock "7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2370.369427] env[62684]: DEBUG nova.compute.manager [req-4cb7e5e9-2de3-4f4c-aff9-92a72cd3cbe9 req-09734b7b-87f4-4b43-bad3-67f047fb74a8 service nova] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] No waiting events found dispatching network-vif-plugged-dee84b05-71ef-4a8b-8cfa-64eea8bf277e {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2370.369594] env[62684]: WARNING nova.compute.manager [req-4cb7e5e9-2de3-4f4c-aff9-92a72cd3cbe9 req-09734b7b-87f4-4b43-bad3-67f047fb74a8 service nova] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Received unexpected event network-vif-plugged-dee84b05-71ef-4a8b-8cfa-64eea8bf277e for instance with vm_state building and task_state spawning. 
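Editor's note on the WARNING entries above ("No waiting events found dispatching network-vif-plugged-... / Received unexpected event ... for instance with vm_state building and task_state spawning"): these come from the compute manager's external-event handling. Neutron reports a port as plugged, and if no code path has yet registered a waiter for that event (the instance is still being built), the event is popped with nothing to notify and the warning is logged, which is typically benign during spawn. Below is a minimal sketch of that register-then-dispatch pattern, assuming nothing beyond the Python standard library; the class and method names are illustrative, not Nova's, and Nova serializes the real thing behind the per-instance "<uuid>-events" lock visible in the surrounding entries rather than a plain threading.Lock.

import threading
from collections import defaultdict

class InstanceEventsSketch:
    # Illustrative stand-in for the pop/dispatch behaviour visible in the log.
    def __init__(self):
        self._lock = threading.Lock()
        # instance uuid -> {event name: threading.Event waited on by the spawner}
        self._waiters = defaultdict(dict)

    def prepare(self, instance_uuid, event_name):
        # A spawning thread registers interest before it starts waiting.
        with self._lock:
            ev = threading.Event()
            self._waiters[instance_uuid][event_name] = ev
            return ev

    def dispatch(self, instance_uuid, event_name):
        # Called when an external event (e.g. network-vif-plugged-<port>) arrives.
        with self._lock:
            ev = self._waiters[instance_uuid].pop(event_name, None)
        if ev is None:
            # No registered waiter: the same situation the WARNING lines record.
            print("Received unexpected event %s for instance %s"
                  % (event_name, instance_uuid))
            return False
        ev.set()
        return True

The same named-lock convention recurs later in this run for the image-cache path, where the lock name is the datastore location of the cached image ("[datastore2] devstack-image-cache_base/...") rather than an instance UUID.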
[ 2370.452441] env[62684]: DEBUG nova.network.neutron [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Successfully updated port: dee84b05-71ef-4a8b-8cfa-64eea8bf277e {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2370.605522] env[62684]: DEBUG oslo_concurrency.lockutils [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquiring lock "refresh_cache-ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2370.605678] env[62684]: DEBUG oslo_concurrency.lockutils [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquired lock "refresh_cache-ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2370.605831] env[62684]: DEBUG nova.network.neutron [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2370.802571] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquiring lock "refresh_cache-d540b43f-5bf9-47df-b319-97a1bae7ffc0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2370.802711] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquired lock "refresh_cache-d540b43f-5bf9-47df-b319-97a1bae7ffc0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2370.802842] env[62684]: DEBUG nova.network.neutron [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2370.955431] env[62684]: DEBUG oslo_concurrency.lockutils [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquiring lock "refresh_cache-7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2370.955578] env[62684]: DEBUG oslo_concurrency.lockutils [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquired lock "refresh_cache-7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2370.955728] env[62684]: DEBUG nova.network.neutron [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 
tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2371.136398] env[62684]: DEBUG nova.network.neutron [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2371.257514] env[62684]: DEBUG nova.network.neutron [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Updating instance_info_cache with network_info: [{"id": "b5464c95-056f-4730-8974-808275aae0f2", "address": "fa:16:3e:db:88:25", "network": {"id": "0b2eba7d-f2f8-4db1-b935-e34edffa47b8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-335916832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c84f03a79784aa38a063e874dcc2c0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13d625c9-77ec-4edb-a56b-9f37a314cc39", "external-id": "nsx-vlan-transportzone-358", "segmentation_id": 358, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5464c95-05", "ovs_interfaceid": "b5464c95-056f-4730-8974-808275aae0f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2371.331404] env[62684]: DEBUG nova.network.neutron [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2371.451673] env[62684]: DEBUG nova.network.neutron [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Updating instance_info_cache with network_info: [{"id": "664c9b72-448f-42d5-bb01-db8d2006bcb8", "address": "fa:16:3e:3b:50:b3", "network": {"id": "0b2eba7d-f2f8-4db1-b935-e34edffa47b8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-335916832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c84f03a79784aa38a063e874dcc2c0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13d625c9-77ec-4edb-a56b-9f37a314cc39", "external-id": "nsx-vlan-transportzone-358", "segmentation_id": 358, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap664c9b72-44", "ovs_interfaceid": "664c9b72-448f-42d5-bb01-db8d2006bcb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2371.483914] env[62684]: DEBUG nova.network.neutron [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2371.609797] env[62684]: DEBUG nova.network.neutron [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Updating instance_info_cache with network_info: [{"id": "dee84b05-71ef-4a8b-8cfa-64eea8bf277e", "address": "fa:16:3e:8c:22:81", "network": {"id": "0b2eba7d-f2f8-4db1-b935-e34edffa47b8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-335916832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c84f03a79784aa38a063e874dcc2c0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13d625c9-77ec-4edb-a56b-9f37a314cc39", "external-id": "nsx-vlan-transportzone-358", "segmentation_id": 358, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdee84b05-71", "ovs_interfaceid": "dee84b05-71ef-4a8b-8cfa-64eea8bf277e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2371.760241] env[62684]: DEBUG oslo_concurrency.lockutils [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Releasing lock "refresh_cache-ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2371.760553] env[62684]: DEBUG nova.compute.manager [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Instance network_info: |[{"id": "b5464c95-056f-4730-8974-808275aae0f2", "address": "fa:16:3e:db:88:25", "network": {"id": "0b2eba7d-f2f8-4db1-b935-e34edffa47b8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-335916832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c84f03a79784aa38a063e874dcc2c0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13d625c9-77ec-4edb-a56b-9f37a314cc39", "external-id": "nsx-vlan-transportzone-358", "segmentation_id": 358, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5464c95-05", "ovs_interfaceid": "b5464c95-056f-4730-8974-808275aae0f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 2371.761087] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:88:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13d625c9-77ec-4edb-a56b-9f37a314cc39', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b5464c95-056f-4730-8974-808275aae0f2', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2371.771369] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Creating folder: Project (2c84f03a79784aa38a063e874dcc2c0a). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2371.771899] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-02a0b62e-8bc5-44f4-bd51-b6fc9079c3d7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.783768] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Created folder: Project (2c84f03a79784aa38a063e874dcc2c0a) in parent group-v421118. [ 2371.784300] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Creating folder: Instances. Parent ref: group-v421421. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2371.784774] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dd6f20f8-c929-4fa8-a778-8d297bebafd5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.794071] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Created folder: Instances in parent group-v421421. [ 2371.794321] env[62684]: DEBUG oslo.service.loopingcall [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2371.794525] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2371.794736] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f9f33db7-87b3-467e-8273-b04acc58ddf0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.813826] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2371.813826] env[62684]: value = "task-2053820" [ 2371.813826] env[62684]: _type = "Task" [ 2371.813826] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2371.821806] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053820, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2371.955381] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Releasing lock "refresh_cache-d540b43f-5bf9-47df-b319-97a1bae7ffc0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2371.955858] env[62684]: DEBUG nova.compute.manager [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Instance network_info: |[{"id": "664c9b72-448f-42d5-bb01-db8d2006bcb8", "address": "fa:16:3e:3b:50:b3", "network": {"id": "0b2eba7d-f2f8-4db1-b935-e34edffa47b8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-335916832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c84f03a79784aa38a063e874dcc2c0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13d625c9-77ec-4edb-a56b-9f37a314cc39", "external-id": "nsx-vlan-transportzone-358", "segmentation_id": 358, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap664c9b72-44", "ovs_interfaceid": "664c9b72-448f-42d5-bb01-db8d2006bcb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2371.956332] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:50:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13d625c9-77ec-4edb-a56b-9f37a314cc39', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': '664c9b72-448f-42d5-bb01-db8d2006bcb8', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2371.963884] env[62684]: DEBUG oslo.service.loopingcall [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2371.964128] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2371.964368] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-16091c71-1875-439f-af20-410bb6be6981 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.994709] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2371.994709] env[62684]: value = "task-2053821" [ 2371.994709] env[62684]: _type = "Task" [ 2371.994709] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2372.003775] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053821, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2372.039989] env[62684]: DEBUG nova.compute.manager [req-862036b2-2e29-4c92-853d-eeb4815843c5 req-26ccd089-cc24-43dd-827b-dbdc1fe85d15 service nova] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Received event network-changed-b5464c95-056f-4730-8974-808275aae0f2 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2372.040257] env[62684]: DEBUG nova.compute.manager [req-862036b2-2e29-4c92-853d-eeb4815843c5 req-26ccd089-cc24-43dd-827b-dbdc1fe85d15 service nova] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Refreshing instance network info cache due to event network-changed-b5464c95-056f-4730-8974-808275aae0f2. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2372.040401] env[62684]: DEBUG oslo_concurrency.lockutils [req-862036b2-2e29-4c92-853d-eeb4815843c5 req-26ccd089-cc24-43dd-827b-dbdc1fe85d15 service nova] Acquiring lock "refresh_cache-ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2372.040630] env[62684]: DEBUG oslo_concurrency.lockutils [req-862036b2-2e29-4c92-853d-eeb4815843c5 req-26ccd089-cc24-43dd-827b-dbdc1fe85d15 service nova] Acquired lock "refresh_cache-ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2372.040707] env[62684]: DEBUG nova.network.neutron [req-862036b2-2e29-4c92-853d-eeb4815843c5 req-26ccd089-cc24-43dd-827b-dbdc1fe85d15 service nova] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Refreshing network info cache for port b5464c95-056f-4730-8974-808275aae0f2 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2372.112209] env[62684]: DEBUG oslo_concurrency.lockutils [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Releasing lock "refresh_cache-7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2372.112579] env[62684]: DEBUG nova.compute.manager [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Instance network_info: |[{"id": "dee84b05-71ef-4a8b-8cfa-64eea8bf277e", "address": "fa:16:3e:8c:22:81", "network": {"id": "0b2eba7d-f2f8-4db1-b935-e34edffa47b8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-335916832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c84f03a79784aa38a063e874dcc2c0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13d625c9-77ec-4edb-a56b-9f37a314cc39", "external-id": "nsx-vlan-transportzone-358", "segmentation_id": 358, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdee84b05-71", "ovs_interfaceid": "dee84b05-71ef-4a8b-8cfa-64eea8bf277e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2372.113053] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:22:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13d625c9-77ec-4edb-a56b-9f37a314cc39', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'dee84b05-71ef-4a8b-8cfa-64eea8bf277e', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2372.120408] env[62684]: DEBUG oslo.service.loopingcall [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2372.120951] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2372.121236] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c6cffa1-61e2-4b04-9f79-4c968dd03106 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.142268] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2372.142268] env[62684]: value = "task-2053822" [ 2372.142268] env[62684]: _type = "Task" [ 2372.142268] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2372.152159] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053822, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2372.323935] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053820, 'name': CreateVM_Task, 'duration_secs': 0.352553} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2372.324123] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2372.324811] env[62684]: DEBUG oslo_concurrency.lockutils [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2372.324992] env[62684]: DEBUG oslo_concurrency.lockutils [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2372.325358] env[62684]: DEBUG oslo_concurrency.lockutils [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2372.325628] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7333bdc-6fd4-42bc-ab46-f37fa642ff25 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
2372.330117] env[62684]: DEBUG oslo_vmware.api [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2372.330117] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ffda66-ed21-b265-8c5b-ee5b70205ad3" [ 2372.330117] env[62684]: _type = "Task" [ 2372.330117] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2372.339246] env[62684]: DEBUG oslo_vmware.api [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ffda66-ed21-b265-8c5b-ee5b70205ad3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2372.403827] env[62684]: DEBUG nova.compute.manager [req-77b53d20-ceae-4211-b9e7-efdbd0f0afb1 req-486fc598-fe05-49d7-96a0-e7b1040853ba service nova] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Received event network-changed-dee84b05-71ef-4a8b-8cfa-64eea8bf277e {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2372.404095] env[62684]: DEBUG nova.compute.manager [req-77b53d20-ceae-4211-b9e7-efdbd0f0afb1 req-486fc598-fe05-49d7-96a0-e7b1040853ba service nova] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Refreshing instance network info cache due to event network-changed-dee84b05-71ef-4a8b-8cfa-64eea8bf277e. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2372.404337] env[62684]: DEBUG oslo_concurrency.lockutils [req-77b53d20-ceae-4211-b9e7-efdbd0f0afb1 req-486fc598-fe05-49d7-96a0-e7b1040853ba service nova] Acquiring lock "refresh_cache-7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2372.404499] env[62684]: DEBUG oslo_concurrency.lockutils [req-77b53d20-ceae-4211-b9e7-efdbd0f0afb1 req-486fc598-fe05-49d7-96a0-e7b1040853ba service nova] Acquired lock "refresh_cache-7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2372.404668] env[62684]: DEBUG nova.network.neutron [req-77b53d20-ceae-4211-b9e7-efdbd0f0afb1 req-486fc598-fe05-49d7-96a0-e7b1040853ba service nova] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Refreshing network info cache for port dee84b05-71ef-4a8b-8cfa-64eea8bf277e {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2372.508017] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053821, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2372.653501] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053822, 'name': CreateVM_Task, 'duration_secs': 0.316404} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2372.653670] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2372.654292] env[62684]: DEBUG oslo_concurrency.lockutils [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2372.737992] env[62684]: DEBUG nova.network.neutron [req-862036b2-2e29-4c92-853d-eeb4815843c5 req-26ccd089-cc24-43dd-827b-dbdc1fe85d15 service nova] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Updated VIF entry in instance network info cache for port b5464c95-056f-4730-8974-808275aae0f2. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2372.738396] env[62684]: DEBUG nova.network.neutron [req-862036b2-2e29-4c92-853d-eeb4815843c5 req-26ccd089-cc24-43dd-827b-dbdc1fe85d15 service nova] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Updating instance_info_cache with network_info: [{"id": "b5464c95-056f-4730-8974-808275aae0f2", "address": "fa:16:3e:db:88:25", "network": {"id": "0b2eba7d-f2f8-4db1-b935-e34edffa47b8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-335916832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c84f03a79784aa38a063e874dcc2c0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13d625c9-77ec-4edb-a56b-9f37a314cc39", "external-id": "nsx-vlan-transportzone-358", "segmentation_id": 358, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5464c95-05", "ovs_interfaceid": "b5464c95-056f-4730-8974-808275aae0f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2372.841387] env[62684]: DEBUG oslo_vmware.api [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ffda66-ed21-b265-8c5b-ee5b70205ad3, 'name': SearchDatastore_Task, 'duration_secs': 0.009632} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2372.841714] env[62684]: DEBUG oslo_concurrency.lockutils [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2372.841959] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2372.842233] env[62684]: DEBUG oslo_concurrency.lockutils [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2372.842388] env[62684]: DEBUG oslo_concurrency.lockutils [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2372.842575] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2372.842875] env[62684]: DEBUG oslo_concurrency.lockutils [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2372.843228] env[62684]: DEBUG oslo_concurrency.lockutils [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2372.843436] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bb13c995-029d-46c2-b1ba-eb0a14144994 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.845227] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0206febe-560d-47b8-b528-e102f4c5e122 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.850361] 
env[62684]: DEBUG oslo_vmware.api [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2372.850361] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c497a0-4b28-d82b-8175-daab799aee3d" [ 2372.850361] env[62684]: _type = "Task" [ 2372.850361] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2372.853984] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2372.854177] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2372.855202] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55752e59-110a-48c4-b4dc-2d555cc5c6b3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.859935] env[62684]: DEBUG oslo_vmware.api [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c497a0-4b28-d82b-8175-daab799aee3d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2372.862957] env[62684]: DEBUG oslo_vmware.api [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2372.862957] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52525cf0-666b-6ee8-4af5-95431f42ca05" [ 2372.862957] env[62684]: _type = "Task" [ 2372.862957] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2372.870989] env[62684]: DEBUG oslo_vmware.api [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52525cf0-666b-6ee8-4af5-95431f42ca05, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2373.008654] env[62684]: DEBUG nova.compute.manager [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Stashing vm_state: active {{(pid=62684) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 2373.017596] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053821, 'name': CreateVM_Task, 'duration_secs': 0.570983} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2373.017831] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2373.018418] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2373.098611] env[62684]: DEBUG nova.network.neutron [req-77b53d20-ceae-4211-b9e7-efdbd0f0afb1 req-486fc598-fe05-49d7-96a0-e7b1040853ba service nova] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Updated VIF entry in instance network info cache for port dee84b05-71ef-4a8b-8cfa-64eea8bf277e. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2373.098997] env[62684]: DEBUG nova.network.neutron [req-77b53d20-ceae-4211-b9e7-efdbd0f0afb1 req-486fc598-fe05-49d7-96a0-e7b1040853ba service nova] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Updating instance_info_cache with network_info: [{"id": "dee84b05-71ef-4a8b-8cfa-64eea8bf277e", "address": "fa:16:3e:8c:22:81", "network": {"id": "0b2eba7d-f2f8-4db1-b935-e34edffa47b8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-335916832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c84f03a79784aa38a063e874dcc2c0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13d625c9-77ec-4edb-a56b-9f37a314cc39", "external-id": "nsx-vlan-transportzone-358", "segmentation_id": 358, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdee84b05-71", "ovs_interfaceid": "dee84b05-71ef-4a8b-8cfa-64eea8bf277e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2373.240787] env[62684]: DEBUG oslo_concurrency.lockutils [req-862036b2-2e29-4c92-853d-eeb4815843c5 req-26ccd089-cc24-43dd-827b-dbdc1fe85d15 service nova] Releasing lock "refresh_cache-ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2373.241192] env[62684]: DEBUG nova.compute.manager [req-862036b2-2e29-4c92-853d-eeb4815843c5 req-26ccd089-cc24-43dd-827b-dbdc1fe85d15 service nova] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Received event network-vif-plugged-664c9b72-448f-42d5-bb01-db8d2006bcb8 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2373.241409] env[62684]: DEBUG oslo_concurrency.lockutils [req-862036b2-2e29-4c92-853d-eeb4815843c5 req-26ccd089-cc24-43dd-827b-dbdc1fe85d15 service nova] Acquiring lock "d540b43f-5bf9-47df-b319-97a1bae7ffc0-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2373.241747] env[62684]: DEBUG oslo_concurrency.lockutils [req-862036b2-2e29-4c92-853d-eeb4815843c5 req-26ccd089-cc24-43dd-827b-dbdc1fe85d15 service nova] Lock "d540b43f-5bf9-47df-b319-97a1bae7ffc0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2373.241970] env[62684]: DEBUG oslo_concurrency.lockutils [req-862036b2-2e29-4c92-853d-eeb4815843c5 req-26ccd089-cc24-43dd-827b-dbdc1fe85d15 service nova] Lock "d540b43f-5bf9-47df-b319-97a1bae7ffc0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2373.242205] env[62684]: DEBUG nova.compute.manager [req-862036b2-2e29-4c92-853d-eeb4815843c5 req-26ccd089-cc24-43dd-827b-dbdc1fe85d15 service nova] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] No waiting events found dispatching network-vif-plugged-664c9b72-448f-42d5-bb01-db8d2006bcb8 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2373.242394] env[62684]: WARNING nova.compute.manager [req-862036b2-2e29-4c92-853d-eeb4815843c5 req-26ccd089-cc24-43dd-827b-dbdc1fe85d15 service nova] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Received unexpected event network-vif-plugged-664c9b72-448f-42d5-bb01-db8d2006bcb8 for instance with vm_state building and task_state spawning. [ 2373.242573] env[62684]: DEBUG nova.compute.manager [req-862036b2-2e29-4c92-853d-eeb4815843c5 req-26ccd089-cc24-43dd-827b-dbdc1fe85d15 service nova] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Received event network-changed-664c9b72-448f-42d5-bb01-db8d2006bcb8 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2373.242738] env[62684]: DEBUG nova.compute.manager [req-862036b2-2e29-4c92-853d-eeb4815843c5 req-26ccd089-cc24-43dd-827b-dbdc1fe85d15 service nova] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Refreshing instance network info cache due to event network-changed-664c9b72-448f-42d5-bb01-db8d2006bcb8. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2373.243009] env[62684]: DEBUG oslo_concurrency.lockutils [req-862036b2-2e29-4c92-853d-eeb4815843c5 req-26ccd089-cc24-43dd-827b-dbdc1fe85d15 service nova] Acquiring lock "refresh_cache-d540b43f-5bf9-47df-b319-97a1bae7ffc0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2373.243192] env[62684]: DEBUG oslo_concurrency.lockutils [req-862036b2-2e29-4c92-853d-eeb4815843c5 req-26ccd089-cc24-43dd-827b-dbdc1fe85d15 service nova] Acquired lock "refresh_cache-d540b43f-5bf9-47df-b319-97a1bae7ffc0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2373.243391] env[62684]: DEBUG nova.network.neutron [req-862036b2-2e29-4c92-853d-eeb4815843c5 req-26ccd089-cc24-43dd-827b-dbdc1fe85d15 service nova] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Refreshing network info cache for port 664c9b72-448f-42d5-bb01-db8d2006bcb8 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2373.362230] env[62684]: DEBUG oslo_vmware.api [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c497a0-4b28-d82b-8175-daab799aee3d, 'name': SearchDatastore_Task, 'duration_secs': 0.009058} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2373.362529] env[62684]: DEBUG oslo_concurrency.lockutils [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2373.362764] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2373.362982] env[62684]: DEBUG oslo_concurrency.lockutils [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2373.363223] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2373.363565] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquired external semaphore "[datastore2] 
devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2373.363865] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c31eca81-360b-44d8-bc4d-76b8b3b7db8f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.371264] env[62684]: DEBUG oslo_vmware.api [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2373.371264] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526b41b1-59b9-1a70-ac4f-7af38da91b34" [ 2373.371264] env[62684]: _type = "Task" [ 2373.371264] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2373.374497] env[62684]: DEBUG oslo_vmware.api [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52525cf0-666b-6ee8-4af5-95431f42ca05, 'name': SearchDatastore_Task, 'duration_secs': 0.010881} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2373.377620] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f70f1833-c973-4f3e-877c-970bb88930bb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.385423] env[62684]: DEBUG oslo_vmware.api [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]526b41b1-59b9-1a70-ac4f-7af38da91b34, 'name': SearchDatastore_Task, 'duration_secs': 0.008997} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2373.386213] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2373.386450] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2373.386658] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2373.386929] env[62684]: DEBUG oslo_vmware.api [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2373.386929] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f0cafa-1565-87c1-d991-4715b111aa21" [ 2373.386929] env[62684]: _type = "Task" [ 2373.386929] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2373.394310] env[62684]: DEBUG oslo_vmware.api [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f0cafa-1565-87c1-d991-4715b111aa21, 'name': SearchDatastore_Task, 'duration_secs': 0.008004} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2373.394534] env[62684]: DEBUG oslo_concurrency.lockutils [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2373.394772] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5/ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2373.395047] env[62684]: DEBUG oslo_concurrency.lockutils [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2373.395243] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2373.395453] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b8536bf-48a9-4995-b6fc-98d08f7fe753 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.397351] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2e30cffc-1e25-415b-9b0a-cb07e2ebf3b5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.404236] env[62684]: DEBUG oslo_vmware.api [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2373.404236] env[62684]: value = "task-2053823" [ 2373.404236] env[62684]: _type = "Task" [ 2373.404236] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2373.405284] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2373.405463] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2373.408635] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4d0bee1-ec91-4e4c-93f0-cdf6702d6670 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.415397] env[62684]: DEBUG oslo_vmware.api [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053823, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2373.416575] env[62684]: DEBUG oslo_vmware.api [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2373.416575] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5296de91-3095-d290-0500-ab8cfecaa616" [ 2373.416575] env[62684]: _type = "Task" [ 2373.416575] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2373.424911] env[62684]: DEBUG oslo_vmware.api [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5296de91-3095-d290-0500-ab8cfecaa616, 'name': SearchDatastore_Task, 'duration_secs': 0.008249} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2373.425672] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5d2334c-6657-4bc4-bc51-1d241bd9905f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.430423] env[62684]: DEBUG oslo_vmware.api [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2373.430423] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52571b23-e087-3cfd-adb8-bb6e4119ad42" [ 2373.430423] env[62684]: _type = "Task" [ 2373.430423] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2373.437874] env[62684]: DEBUG oslo_vmware.api [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52571b23-e087-3cfd-adb8-bb6e4119ad42, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2373.529257] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2373.529542] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2373.602239] env[62684]: DEBUG oslo_concurrency.lockutils [req-77b53d20-ceae-4211-b9e7-efdbd0f0afb1 req-486fc598-fe05-49d7-96a0-e7b1040853ba service nova] Releasing lock "refresh_cache-7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2373.915254] env[62684]: DEBUG oslo_vmware.api [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053823, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.445426} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2373.915254] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5/ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2373.915433] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2373.915556] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-778941e2-3d3b-46be-8ac1-06b1951695de {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.922217] env[62684]: DEBUG oslo_vmware.api [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2373.922217] env[62684]: value = "task-2053825" [ 2373.922217] env[62684]: _type = "Task" [ 2373.922217] env[62684]: } to complete. 
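
The entries around this point follow oslo.vmware's wait_for_task pattern: a task handle (or session-scoped returnval) comes back immediately, is polled while it reports "progress is N%", and finishes with a duration_secs once completed successfully. A minimal, generic sketch of that poll loop; the function and the status-dict shape below are illustrative stand-ins, not oslo.vmware's actual API:

    import time

    def wait_for_task(poll_fn, interval=0.5, timeout=300.0):
        """Poll poll_fn() until the task reports success; return elapsed seconds.

        poll_fn is assumed to return a dict such as
        {'state': 'running', 'progress': 40} or {'state': 'success'}.
        """
        start = time.monotonic()
        while True:
            status = poll_fn()
            if status['state'] == 'success':
                # analogue of the duration_secs value logged on completion
                return time.monotonic() - start
            if status['state'] == 'error':
                raise RuntimeError(status.get('error_msg', 'task failed'))
            print(f"progress is {status.get('progress', 0)}%")
            if time.monotonic() - start > timeout:
                raise TimeoutError(f"task did not complete within {timeout}s")
            time.sleep(interval)
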
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2373.930819] env[62684]: DEBUG oslo_vmware.api [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053825, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2373.938999] env[62684]: DEBUG oslo_vmware.api [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52571b23-e087-3cfd-adb8-bb6e4119ad42, 'name': SearchDatastore_Task, 'duration_secs': 0.008671} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2373.939311] env[62684]: DEBUG oslo_concurrency.lockutils [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2373.939544] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d/7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2373.939820] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2373.940016] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2373.940246] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e0594cbc-e506-463e-9cd9-11db68966124 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.942090] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c50f8682-0a4b-4dd8-9d64-f901c92706a0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.947647] env[62684]: DEBUG oslo_vmware.api [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2373.947647] env[62684]: value = 
"task-2053826" [ 2373.947647] env[62684]: _type = "Task" [ 2373.947647] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2373.951173] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2373.951354] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2373.952295] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c75b4ea3-c583-407e-894e-65a96b8ba811 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.957522] env[62684]: DEBUG nova.network.neutron [req-862036b2-2e29-4c92-853d-eeb4815843c5 req-26ccd089-cc24-43dd-827b-dbdc1fe85d15 service nova] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Updated VIF entry in instance network info cache for port 664c9b72-448f-42d5-bb01-db8d2006bcb8. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2373.957852] env[62684]: DEBUG nova.network.neutron [req-862036b2-2e29-4c92-853d-eeb4815843c5 req-26ccd089-cc24-43dd-827b-dbdc1fe85d15 service nova] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Updating instance_info_cache with network_info: [{"id": "664c9b72-448f-42d5-bb01-db8d2006bcb8", "address": "fa:16:3e:3b:50:b3", "network": {"id": "0b2eba7d-f2f8-4db1-b935-e34edffa47b8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-335916832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c84f03a79784aa38a063e874dcc2c0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13d625c9-77ec-4edb-a56b-9f37a314cc39", "external-id": "nsx-vlan-transportzone-358", "segmentation_id": 358, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap664c9b72-44", "ovs_interfaceid": "664c9b72-448f-42d5-bb01-db8d2006bcb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2373.959028] env[62684]: DEBUG oslo_vmware.api [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053826, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2373.961736] env[62684]: DEBUG oslo_vmware.api [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2373.961736] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]525fe428-38d9-4baa-1d1b-c082f447ad2b" [ 2373.961736] env[62684]: _type = "Task" [ 2373.961736] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2373.970439] env[62684]: DEBUG oslo_vmware.api [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]525fe428-38d9-4baa-1d1b-c082f447ad2b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2374.034682] env[62684]: INFO nova.compute.claims [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2374.431696] env[62684]: DEBUG oslo_vmware.api [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053825, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069413} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2374.432041] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2374.432882] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd042728-4914-499a-a601-6a4c8cee5349 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.454380] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5/ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2374.454686] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-82f91e15-a225-4f81-aea5-bbe41ac87497 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.470884] env[62684]: DEBUG oslo_concurrency.lockutils [req-862036b2-2e29-4c92-853d-eeb4815843c5 req-26ccd089-cc24-43dd-827b-dbdc1fe85d15 service nova] Releasing lock "refresh_cache-d540b43f-5bf9-47df-b319-97a1bae7ffc0" {{(pid=62684) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2374.480047] env[62684]: DEBUG oslo_vmware.api [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]525fe428-38d9-4baa-1d1b-c082f447ad2b, 'name': SearchDatastore_Task, 'duration_secs': 0.016488} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2374.484055] env[62684]: DEBUG oslo_vmware.api [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2374.484055] env[62684]: value = "task-2053827" [ 2374.484055] env[62684]: _type = "Task" [ 2374.484055] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2374.484259] env[62684]: DEBUG oslo_vmware.api [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053826, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2374.484463] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6eb25fa6-e22d-4272-a7fe-e271a87de1ee {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.491675] env[62684]: DEBUG oslo_vmware.api [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2374.491675] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f1f9d9-f6ca-086b-a471-d5c141343877" [ 2374.491675] env[62684]: _type = "Task" [ 2374.491675] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2374.494535] env[62684]: DEBUG oslo_vmware.api [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053827, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2374.501341] env[62684]: DEBUG oslo_vmware.api [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f1f9d9-f6ca-086b-a471-d5c141343877, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2374.540224] env[62684]: INFO nova.compute.resource_tracker [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Updating resource usage from migration bab91836-d491-4348-8ab7-41452807a6c5 [ 2374.657983] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57f024b1-89c4-4230-8421-f32956b21f9d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.665689] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c43c3f07-c445-4e51-bc08-db7815279819 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.696756] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f3a3a45-22fc-4995-a7a7-cadde3acaa78 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.704023] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a898e93-ce93-47e4-9287-ff257d79466a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.717025] env[62684]: DEBUG nova.compute.provider_tree [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2374.770622] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "f7b61e23-fe0d-41fb-9100-d07cd8cb2d04" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2374.770870] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "f7b61e23-fe0d-41fb-9100-d07cd8cb2d04" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2374.771151] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "f7b61e23-fe0d-41fb-9100-d07cd8cb2d04-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2374.771359] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "f7b61e23-fe0d-41fb-9100-d07cd8cb2d04-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2374.771535] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "f7b61e23-fe0d-41fb-9100-d07cd8cb2d04-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2374.773511] env[62684]: INFO nova.compute.manager [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Terminating instance [ 2374.775178] env[62684]: DEBUG nova.compute.manager [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2374.775377] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2374.776159] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e0e7bf-6a8f-4b52-a3e2-62b32808a7f0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.783791] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2374.784026] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3cfdaa24-5cc6-467c-ad77-3c7de3691e08 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.790918] env[62684]: DEBUG oslo_vmware.api [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2374.790918] env[62684]: value = "task-2053828" [ 2374.790918] env[62684]: _type = "Task" [ 2374.790918] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2374.798057] env[62684]: DEBUG oslo_vmware.api [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053828, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2374.964477] env[62684]: DEBUG oslo_vmware.api [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053826, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2374.996361] env[62684]: DEBUG oslo_vmware.api [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053827, 'name': ReconfigVM_Task, 'duration_secs': 0.275173} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2374.999727] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Reconfigured VM instance instance-00000073 to attach disk [datastore2] ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5/ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2375.000593] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-924ea729-04e9-435f-b20a-2b94f4444a6f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.007886] env[62684]: DEBUG oslo_vmware.api [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f1f9d9-f6ca-086b-a471-d5c141343877, 'name': SearchDatastore_Task, 'duration_secs': 0.009108} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2375.009157] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2375.009470] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] d540b43f-5bf9-47df-b319-97a1bae7ffc0/d540b43f-5bf9-47df-b319-97a1bae7ffc0.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2375.009900] env[62684]: DEBUG oslo_vmware.api [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2375.009900] env[62684]: value = "task-2053829" [ 2375.009900] env[62684]: _type = "Task" [ 2375.009900] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2375.010141] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-09aecea9-f1b3-4837-a6cf-6b74571dd6cf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.020008] env[62684]: DEBUG oslo_vmware.api [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2375.020008] env[62684]: value = "task-2053830" [ 2375.020008] env[62684]: _type = "Task" [ 2375.020008] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2375.023293] env[62684]: DEBUG oslo_vmware.api [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053829, 'name': Rename_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2375.030564] env[62684]: DEBUG oslo_vmware.api [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053830, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2375.220669] env[62684]: DEBUG nova.scheduler.client.report [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2375.301861] env[62684]: DEBUG oslo_vmware.api [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053828, 'name': PowerOffVM_Task, 'duration_secs': 0.383648} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2375.302170] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2375.302399] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2375.302693] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-480db27b-ef9f-48a2-82a3-a4f50ce84168 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.466693] env[62684]: DEBUG oslo_vmware.api [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053826, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.44248} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2375.466998] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d/7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2375.467230] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2375.467499] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ea0e4fb1-fd33-4a20-823f-388016221c1c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.475100] env[62684]: DEBUG oslo_vmware.api [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2375.475100] env[62684]: value = "task-2053832" [ 2375.475100] env[62684]: _type = "Task" [ 2375.475100] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2375.482328] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2375.482595] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2375.482776] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Deleting the datastore file [datastore1] f7b61e23-fe0d-41fb-9100-d07cd8cb2d04 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2375.485817] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-59e82a5d-ad31-4c26-8594-0be9e0a28384 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.487735] env[62684]: DEBUG oslo_vmware.api [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053832, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2375.494417] env[62684]: DEBUG oslo_vmware.api [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2375.494417] env[62684]: value = "task-2053833" [ 2375.494417] env[62684]: _type = "Task" [ 2375.494417] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2375.504410] env[62684]: DEBUG oslo_vmware.api [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053833, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2375.522111] env[62684]: DEBUG oslo_vmware.api [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053829, 'name': Rename_Task, 'duration_secs': 0.157269} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2375.522421] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2375.522692] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1307ec3b-c940-4762-b5ea-f1f66278a835 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.534258] env[62684]: DEBUG oslo_vmware.api [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053830, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2375.535740] env[62684]: DEBUG oslo_vmware.api [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2375.535740] env[62684]: value = "task-2053834" [ 2375.535740] env[62684]: _type = "Task" [ 2375.535740] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2375.544724] env[62684]: DEBUG oslo_vmware.api [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053834, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2375.728206] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.198s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2375.728447] env[62684]: INFO nova.compute.manager [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Migrating [ 2375.984754] env[62684]: DEBUG oslo_vmware.api [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053832, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2376.003499] env[62684]: DEBUG oslo_vmware.api [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053833, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.251235} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2376.003749] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2376.003942] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2376.004140] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2376.004318] env[62684]: INFO nova.compute.manager [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Took 1.23 seconds to destroy the instance on the hypervisor. [ 2376.004553] env[62684]: DEBUG oslo.service.loopingcall [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2376.004761] env[62684]: DEBUG nova.compute.manager [-] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2376.004850] env[62684]: DEBUG nova.network.neutron [-] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2376.034178] env[62684]: DEBUG oslo_vmware.api [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053830, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.671772} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2376.034441] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] d540b43f-5bf9-47df-b319-97a1bae7ffc0/d540b43f-5bf9-47df-b319-97a1bae7ffc0.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2376.034657] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2376.034919] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-438e81fc-14f9-4ae5-9426-63833ceeda35 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2376.045231] env[62684]: DEBUG oslo_vmware.api [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053834, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2376.046449] env[62684]: DEBUG oslo_vmware.api [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2376.046449] env[62684]: value = "task-2053835" [ 2376.046449] env[62684]: _type = "Task" [ 2376.046449] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2376.054024] env[62684]: DEBUG oslo_vmware.api [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053835, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2376.244269] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "refresh_cache-b88d9418-7e90-473e-bd9a-18bc398faad0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2376.244463] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired lock "refresh_cache-b88d9418-7e90-473e-bd9a-18bc398faad0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2376.244657] env[62684]: DEBUG nova.network.neutron [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2376.485378] env[62684]: DEBUG nova.compute.manager [req-c24f06ba-0a19-4ca8-bcc2-11f333af3e95 req-faa368c2-3606-435b-bb09-3039938ea579 service nova] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Received event network-vif-deleted-7a61c618-de41-42f8-a8f4-ed5cf615ac72 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2376.486355] env[62684]: INFO nova.compute.manager [req-c24f06ba-0a19-4ca8-bcc2-11f333af3e95 req-faa368c2-3606-435b-bb09-3039938ea579 service nova] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Neutron deleted interface 7a61c618-de41-42f8-a8f4-ed5cf615ac72; detaching it from the instance and deleting it from the info cache [ 2376.486355] env[62684]: DEBUG nova.network.neutron [req-c24f06ba-0a19-4ca8-bcc2-11f333af3e95 req-faa368c2-3606-435b-bb09-3039938ea579 service nova] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2376.490092] env[62684]: DEBUG oslo_vmware.api [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053832, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.753435} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2376.490801] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2376.491509] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ed46914-b987-41a4-ac14-2bc826316a26 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2376.518446] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Reconfiguring VM instance instance-00000075 to attach disk [datastore2] 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d/7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2376.518446] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8f02e84-bbf9-4c6e-89d7-c42842aa513d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2376.537428] env[62684]: DEBUG oslo_vmware.api [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2376.537428] env[62684]: value = "task-2053836" [ 2376.537428] env[62684]: _type = "Task" [ 2376.537428] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2376.555738] env[62684]: DEBUG oslo_vmware.api [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053836, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2376.556015] env[62684]: DEBUG oslo_vmware.api [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053834, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2376.560851] env[62684]: DEBUG oslo_vmware.api [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053835, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067437} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2376.561168] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2376.565026] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67220adf-e8b2-4950-bcee-42897f52bc6b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2376.585490] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Reconfiguring VM instance instance-00000074 to attach disk [datastore2] d540b43f-5bf9-47df-b319-97a1bae7ffc0/d540b43f-5bf9-47df-b319-97a1bae7ffc0.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2376.585781] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ca4c04d-a656-4548-b5b0-3b9258dc9bb2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2376.606228] env[62684]: DEBUG oslo_vmware.api [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2376.606228] env[62684]: value = "task-2053837" [ 2376.606228] env[62684]: _type = "Task" [ 2376.606228] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2376.614414] env[62684]: DEBUG oslo_vmware.api [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053837, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2376.964996] env[62684]: DEBUG nova.network.neutron [-] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2376.991421] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4349932c-b1e3-435e-90ea-7cb5c7fde48f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2377.001842] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0426d23-5a3a-496d-a162-d488e398e970 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2377.011933] env[62684]: DEBUG nova.network.neutron [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Updating instance_info_cache with network_info: [{"id": "7e4b9e76-bf05-4ee7-b25c-922484094be0", "address": "fa:16:3e:03:9c:84", "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-120070593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "263c101fcc5e493789b79dfd1ba97cc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e4b9e76-bf", "ovs_interfaceid": "7e4b9e76-bf05-4ee7-b25c-922484094be0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2377.017264] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "8d22d555-f837-4eb3-9474-c1434649584e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2377.017490] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "8d22d555-f837-4eb3-9474-c1434649584e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2377.038132] env[62684]: DEBUG nova.compute.manager 
[req-c24f06ba-0a19-4ca8-bcc2-11f333af3e95 req-faa368c2-3606-435b-bb09-3039938ea579 service nova] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Detach interface failed, port_id=7a61c618-de41-42f8-a8f4-ed5cf615ac72, reason: Instance f7b61e23-fe0d-41fb-9100-d07cd8cb2d04 could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2377.051500] env[62684]: DEBUG oslo_vmware.api [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053836, 'name': ReconfigVM_Task, 'duration_secs': 0.294682} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2377.051740] env[62684]: DEBUG oslo_vmware.api [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053834, 'name': PowerOnVM_Task, 'duration_secs': 1.036122} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2377.051980] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Reconfigured VM instance instance-00000075 to attach disk [datastore2] 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d/7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2377.052629] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2377.052834] env[62684]: INFO nova.compute.manager [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Took 13.19 seconds to spawn the instance on the hypervisor. 
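Note on the task entries above: they follow oslo.vmware's standard polling pattern. A *_Task method is invoked, wait_for_task registers the returned task reference, and _poll_task logs "progress is N%" until the task reports success or error. Below is a minimal, self-contained sketch of that loop; TaskInfo and get_task_info are hypothetical stand-ins for the property-collector reads that oslo_vmware.api actually performs, not the library's real interface.

    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        # Hypothetical stand-in for the vSphere TaskInfo object the library reads.
        state: str              # 'queued' | 'running' | 'success' | 'error'
        progress: int           # 0-100, echoed in the "progress is N%" lines
        error: str | None = None

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
        """Poll a task until it finishes, mirroring the log entries above."""
        deadline = time.monotonic() + timeout
        while True:
            info = get_task_info()
            if info.state == 'success':
                return info
            if info.state == 'error':
                raise RuntimeError(f"task failed: {info.error}")
            if time.monotonic() > deadline:
                raise TimeoutError("task did not complete in time")
            print(f"progress is {info.progress}%")   # cf. the _poll_task DEBUG lines
            time.sleep(poll_interval)

    # Tiny demo: two in-progress polls, then success.
    _states = iter([TaskInfo('running', 25), TaskInfo('running', 66),
                    TaskInfo('success', 100)])
    wait_for_task(lambda: next(_states), poll_interval=0.01)

The driver blocks in a loop like this between each "Waiting for the task" entry and the matching "completed successfully" record, whose duration_secs is roughly the wall-clock time spent polling.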
[ 2377.053030] env[62684]: DEBUG nova.compute.manager [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2377.053284] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-11489ac9-78a1-4ed1-adb4-d9887925cbeb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2377.055144] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abc510bf-1fc3-411d-a8c1-744dddde46f3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2377.064551] env[62684]: DEBUG oslo_vmware.api [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2377.064551] env[62684]: value = "task-2053838" [ 2377.064551] env[62684]: _type = "Task" [ 2377.064551] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2377.071896] env[62684]: DEBUG oslo_vmware.api [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053838, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2377.116125] env[62684]: DEBUG oslo_vmware.api [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053837, 'name': ReconfigVM_Task, 'duration_secs': 0.296523} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2377.116481] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Reconfigured VM instance instance-00000074 to attach disk [datastore2] d540b43f-5bf9-47df-b319-97a1bae7ffc0/d540b43f-5bf9-47df-b319-97a1bae7ffc0.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2377.117170] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0cbae99c-5668-49f4-b53d-e14ee299269a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2377.123016] env[62684]: DEBUG oslo_vmware.api [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2377.123016] env[62684]: value = "task-2053839" [ 2377.123016] env[62684]: _type = "Task" [ 2377.123016] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2377.131016] env[62684]: DEBUG oslo_vmware.api [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053839, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2377.468164] env[62684]: INFO nova.compute.manager [-] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Took 1.46 seconds to deallocate network for instance. [ 2377.517192] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Releasing lock "refresh_cache-b88d9418-7e90-473e-bd9a-18bc398faad0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2377.519042] env[62684]: DEBUG nova.compute.manager [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2377.574349] env[62684]: INFO nova.compute.manager [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Took 17.92 seconds to build instance. [ 2377.580718] env[62684]: DEBUG oslo_vmware.api [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053838, 'name': Rename_Task, 'duration_secs': 0.179023} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2377.580969] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2377.581253] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bdb419ce-ef35-4fec-a1b3-2e2713add969 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2377.587558] env[62684]: DEBUG oslo_vmware.api [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2377.587558] env[62684]: value = "task-2053840" [ 2377.587558] env[62684]: _type = "Task" [ 2377.587558] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2377.595494] env[62684]: DEBUG oslo_vmware.api [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053840, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2377.633450] env[62684]: DEBUG oslo_vmware.api [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053839, 'name': Rename_Task, 'duration_secs': 0.155527} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2377.633770] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2377.634054] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-352e6e1d-4adb-4335-895a-2f6b27f698f4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2377.640221] env[62684]: DEBUG oslo_vmware.api [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2377.640221] env[62684]: value = "task-2053841" [ 2377.640221] env[62684]: _type = "Task" [ 2377.640221] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2377.647712] env[62684]: DEBUG oslo_vmware.api [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053841, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2377.975531] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2377.975892] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2377.976190] env[62684]: DEBUG nova.objects.instance [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lazy-loading 'resources' on Instance uuid f7b61e23-fe0d-41fb-9100-d07cd8cb2d04 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2378.042212] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2378.076394] env[62684]: DEBUG oslo_concurrency.lockutils [None req-78d3f698-2970-49c0-982c-3a94cdde0759 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.434s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2378.099466] env[62684]: DEBUG oslo_vmware.api [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053840, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2378.150690] env[62684]: DEBUG oslo_vmware.api [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053841, 'name': PowerOnVM_Task, 'duration_secs': 0.496235} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2378.151104] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2378.151360] env[62684]: INFO nova.compute.manager [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Took 12.11 seconds to spawn the instance on the hypervisor. [ 2378.151555] env[62684]: DEBUG nova.compute.manager [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2378.152339] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d6c08b-9948-4b38-b0d7-72e67e5c0cde {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.301075] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2378.301277] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Cleaning up deleted instances {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 2378.598837] env[62684]: DEBUG oslo_vmware.api [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053840, 'name': PowerOnVM_Task, 'duration_secs': 0.698514} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2378.599132] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2378.599347] env[62684]: INFO nova.compute.manager [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Took 10.36 seconds to spawn the instance on the hypervisor. 
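The two spawns completing above (instances 7feaf3cf and d540b43f) follow the same disk-preparation sequence: copy the cached image vmdk out of devstack-image-cache_base, extend the copy to the flavor's root disk size, attach it to the VM via ReconfigVM_Task, rename the VM, and power it on. The sketch below strings those steps together for readability; every helper is a hypothetical placeholder for the nova.virt.vmwareapi vm_util/volumeops routine named in its print output, and the size figure matches the "Extending root virtual disk to 1048576" entries (KB for a 1 GiB root disk).

    # Hypothetical stand-ins for the vm_util/volumeops calls named in the log;
    # only the ordering of the steps is the point here.
    def copy_virtual_disk(src, dst):
        print(f"CopyVirtualDisk_Task: {src} -> {dst}")

    def extend_virtual_disk(path, size_kb):
        print(f"ExtendVirtualDisk_Task: {path} to {size_kb} KB")

    def attach_disk_to_vm(vm, path, disk_type):
        print(f"ReconfigVM_Task: attach {path} ({disk_type}) to {vm}")

    def rename_vm(vm):
        print(f"Rename_Task: {vm}")

    def power_on_vm(vm):
        print(f"PowerOnVM_Task: {vm}")

    def spawn_from_cached_image(vm_uuid, image_id, datastore, root_gb):
        """Disk-preparation steps in the order they appear in the log."""
        cached = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
        target = f"[{datastore}] {vm_uuid}/{vm_uuid}.vmdk"
        copy_virtual_disk(cached, target)
        extend_virtual_disk(target, root_gb * 1024 * 1024)   # 1048576 KB for 1 GiB
        attach_disk_to_vm(vm_uuid, target, "sparse")
        rename_vm(vm_uuid)
        power_on_vm(vm_uuid)

    spawn_from_cached_image("d540b43f-5bf9-47df-b319-97a1bae7ffc0",
                            "3931321c-cb4c-4b87-8d3a-50e05ea01db2",
                            "datastore2", root_gb=1)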
[ 2378.599536] env[62684]: DEBUG nova.compute.manager [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2378.600439] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-999ede96-6cbd-4e93-950b-01f3ca4098c5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.604156] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1976ec9d-fabc-4518-87d4-ccb503be51d0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.616262] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32d355a3-e2ba-427e-b285-30e15d38b9a0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.647366] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a17d9e3e-2d97-4605-9837-74fd8c7af46d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.655118] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a6c1ebd-70ed-4a09-a56b-31759914c9c7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.673538] env[62684]: DEBUG nova.compute.provider_tree [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2378.678309] env[62684]: INFO nova.compute.manager [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Took 18.68 seconds to build instance. 
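The oslo_concurrency.lockutils lines woven through these entries ("Acquiring lock X by Y", "acquired ... waited N s", "released ... held N s") come from the DEBUG wrapper around oslo.concurrency's in-process locks. That is how the resource tracker serializes "compute_resources" updates on a node and how each build is serialized on its instance UUID. A short sketch of both forms, assuming only that oslo.concurrency is installed:

    from oslo_concurrency import lockutils

    # Decorator form: one lock name shared by all callers, comparable to the
    # "compute_resources" lock held by instance_claim/update_usage above.
    @lockutils.synchronized('compute_resources')
    def update_usage(instance_uuid):
        print(f"updating usage for {instance_uuid}")

    # Context-manager form, comparable to the per-instance build lock
    # acquired by _locked_do_build_and_run_instance.
    def build_and_run_instance(instance_uuid):
        with lockutils.lock(instance_uuid):
            print(f"building {instance_uuid}")

    update_usage("8d22d555-f837-4eb3-9474-c1434649584e")
    build_and_run_instance("8d22d555-f837-4eb3-9474-c1434649584e")

With debug logging enabled, the waited/held timings reported in the log make contention easy to spot; the 20.195s hold on the d540b43f build lock below, for example, spans essentially the whole 18.68s build.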
[ 2378.813999] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] There are 41 instances to clean {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 2378.814198] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 69d26980-f42d-4d35-8de3-a85d7a6f0a11] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2379.036730] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c89ebe4-c1dc-4bd1-8fc3-69d55ed18b75 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.055602] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Updating instance 'b88d9418-7e90-473e-bd9a-18bc398faad0' progress to 0 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2379.122425] env[62684]: INFO nova.compute.manager [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Took 18.79 seconds to build instance. [ 2379.177939] env[62684]: DEBUG nova.scheduler.client.report [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2379.181614] env[62684]: DEBUG oslo_concurrency.lockutils [None req-ab704c48-2e7b-4d7b-a164-f6a682bea95a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "d540b43f-5bf9-47df-b319-97a1bae7ffc0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.195s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2379.317672] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: a9965f71-e965-4144-a64a-6ee43ad20fc0] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2379.561479] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2379.561814] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8d010e09-552d-4f76-8b5e-221a62a5bb9b {{(pid=62684) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.570156] env[62684]: DEBUG oslo_vmware.api [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2379.570156] env[62684]: value = "task-2053842" [ 2379.570156] env[62684]: _type = "Task" [ 2379.570156] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2379.577872] env[62684]: DEBUG oslo_vmware.api [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053842, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2379.624294] env[62684]: DEBUG oslo_concurrency.lockutils [None req-439d8f83-c4c4-4604-99b4-a831d67cf343 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.296s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2379.684107] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.708s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2379.686217] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.644s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2379.687773] env[62684]: INFO nova.compute.claims [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2379.703642] env[62684]: INFO nova.scheduler.client.report [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Deleted allocations for instance f7b61e23-fe0d-41fb-9100-d07cd8cb2d04 [ 2379.822390] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: b262673b-e4d3-48d8-9f93-6c60d48ae29d] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2380.080702] env[62684]: DEBUG oslo_vmware.api [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053842, 'name': PowerOffVM_Task, 'duration_secs': 0.243334} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2380.080983] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2380.081227] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Updating instance 'b88d9418-7e90-473e-bd9a-18bc398faad0' progress to 17 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2380.212768] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c2c27761-34f6-4f58-bfe3-e63488c6ed04 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "f7b61e23-fe0d-41fb-9100-d07cd8cb2d04" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.442s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2380.325972] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: c87b2875-ae05-4091-93fe-7b33d4ca864b] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2380.587443] env[62684]: DEBUG nova.virt.hardware [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2380.587714] env[62684]: DEBUG nova.virt.hardware [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2380.587880] env[62684]: DEBUG nova.virt.hardware [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2380.588083] env[62684]: DEBUG nova.virt.hardware [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2380.588240] env[62684]: DEBUG nova.virt.hardware [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 
tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2380.588397] env[62684]: DEBUG nova.virt.hardware [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2380.588616] env[62684]: DEBUG nova.virt.hardware [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2380.588783] env[62684]: DEBUG nova.virt.hardware [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2380.588954] env[62684]: DEBUG nova.virt.hardware [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2380.589188] env[62684]: DEBUG nova.virt.hardware [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2380.589385] env[62684]: DEBUG nova.virt.hardware [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2380.594657] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b34b4082-12fb-4761-ac6b-8fc1d8e68c3e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.611928] env[62684]: DEBUG oslo_vmware.api [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2380.611928] env[62684]: value = "task-2053843" [ 2380.611928] env[62684]: _type = "Task" [ 2380.611928] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2380.620946] env[62684]: DEBUG oslo_vmware.api [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053843, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2380.829632] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 68ed9549-14ab-4f90-bd78-925f289dc029] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2380.853105] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62ce4daa-811a-4d6a-8408-c80ec8fb6a9f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.863912] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ed897b2-c809-44ac-831d-ece2ea7c5e94 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.898090] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8bf06ef-6d8e-4088-8169-ebb1e41b9dca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.905608] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ec638c-cbc8-4d7a-802a-f0b5fa5e91a3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.920428] env[62684]: DEBUG nova.compute.provider_tree [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2381.126275] env[62684]: DEBUG oslo_vmware.api [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053843, 'name': ReconfigVM_Task, 'duration_secs': 0.212492} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2381.126716] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Updating instance 'b88d9418-7e90-473e-bd9a-18bc398faad0' progress to 33 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2381.246150] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d2f90531-f272-42c0-be05-9322dde93f4a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquiring lock "ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2381.246574] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d2f90531-f272-42c0-be05-9322dde93f4a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2381.246781] env[62684]: DEBUG nova.compute.manager [None req-d2f90531-f272-42c0-be05-9322dde93f4a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2381.247737] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1570981-5d94-43ff-9c1d-39e636204049 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.254697] env[62684]: DEBUG nova.compute.manager [None req-d2f90531-f272-42c0-be05-9322dde93f4a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62684) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 2381.255455] env[62684]: DEBUG nova.objects.instance [None req-d2f90531-f272-42c0-be05-9322dde93f4a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lazy-loading 'flavor' on Instance uuid ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2381.332333] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 0bb25bfe-bc00-4da8-9e8b-da0b853f5e9d] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2381.424070] env[62684]: DEBUG nova.scheduler.client.report [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2381.635270] env[62684]: DEBUG nova.virt.hardware [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2381.635498] env[62684]: DEBUG nova.virt.hardware [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2381.635608] env[62684]: DEBUG nova.virt.hardware [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2381.635901] env[62684]: DEBUG nova.virt.hardware [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2381.636105] env[62684]: DEBUG nova.virt.hardware [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2381.636277] env[62684]: DEBUG nova.virt.hardware [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2381.636491] env[62684]: DEBUG nova.virt.hardware [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2381.636654] env[62684]: DEBUG nova.virt.hardware [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 2381.636858] env[62684]: DEBUG nova.virt.hardware [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2381.637053] env[62684]: DEBUG nova.virt.hardware [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2381.637242] env[62684]: DEBUG nova.virt.hardware [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2381.642772] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Reconfiguring VM instance instance-00000070 to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2381.643086] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0a354a33-5f0b-4169-bbf8-2d8860daebf3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.660979] env[62684]: DEBUG oslo_vmware.api [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2381.660979] env[62684]: value = "task-2053844" [ 2381.660979] env[62684]: _type = "Task" [ 2381.660979] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2381.668738] env[62684]: DEBUG oslo_vmware.api [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053844, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2381.762550] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2f90531-f272-42c0-be05-9322dde93f4a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2381.762771] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-989405e4-1607-497f-99e1-cedc35a5a9f8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.769843] env[62684]: DEBUG oslo_vmware.api [None req-d2f90531-f272-42c0-be05-9322dde93f4a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2381.769843] env[62684]: value = "task-2053845" [ 2381.769843] env[62684]: _type = "Task" [ 2381.769843] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2381.778294] env[62684]: DEBUG oslo_vmware.api [None req-d2f90531-f272-42c0-be05-9322dde93f4a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053845, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2381.840268] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 9f1e9ae9-c082-4fbe-bd21-6e14e40962c1] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2381.842535] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5faaf3ec-084e-4d4f-bf62-70d4e9fd6c5e tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "5b3668f3-219d-4304-bc9e-9b911762085d" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2381.842772] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5faaf3ec-084e-4d4f-bf62-70d4e9fd6c5e tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "5b3668f3-219d-4304-bc9e-9b911762085d" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2381.928476] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.242s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2381.928880] env[62684]: DEBUG nova.compute.manager [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Start building networks asynchronously for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2382.170734] env[62684]: DEBUG oslo_vmware.api [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053844, 'name': ReconfigVM_Task, 'duration_secs': 0.158153} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2382.170968] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Reconfigured VM instance instance-00000070 to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2382.171756] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20624395-88a9-4c25-b799-374a105a0238 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.192940] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] b88d9418-7e90-473e-bd9a-18bc398faad0/b88d9418-7e90-473e-bd9a-18bc398faad0.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2382.193196] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d34bf08b-4a36-490d-aec9-e975be08e3a2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.210345] env[62684]: DEBUG oslo_vmware.api [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2382.210345] env[62684]: value = "task-2053846" [ 2382.210345] env[62684]: _type = "Task" [ 2382.210345] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2382.219094] env[62684]: DEBUG oslo_vmware.api [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053846, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2382.279647] env[62684]: DEBUG oslo_vmware.api [None req-d2f90531-f272-42c0-be05-9322dde93f4a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053845, 'name': PowerOffVM_Task, 'duration_secs': 0.181392} completed successfully. 
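
Several entries in this stretch show oslo_vmware.api waiting on vCenter tasks (ReconfigVM_Task, PowerOffVM_Task), repeatedly polling progress until the task completes and then logging its duration. As a rough, generic illustration of that polling shape only (plain standard-library code, not oslo.vmware's actual interface):

    import time

    def wait_for_task(poll_fn, interval=0.5, timeout=300.0):
        """Poll poll_fn() until it reports ('success', result) or ('error', msg).

        poll_fn is a caller-supplied callable standing in for a vCenter task
        progress query; this is an illustrative sketch, not the oslo.vmware API.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, payload = poll_fn()
            if state == "success":
                return payload
            if state == "error":
                raise RuntimeError(f"task failed: {payload}")
            time.sleep(interval)  # still running, e.g. "progress is 5%"
        raise TimeoutError("task did not complete in time")

    # Usage with a fake task that finishes on the third poll:
    calls = {"n": 0}
    def fake_poll():
        calls["n"] += 1
        return ("success", {"id": "task-2053844"}) if calls["n"] >= 3 else ("running", 5)

    print(wait_for_task(fake_poll, interval=0.01))
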
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2382.279902] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2f90531-f272-42c0-be05-9322dde93f4a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2382.280099] env[62684]: DEBUG nova.compute.manager [None req-d2f90531-f272-42c0-be05-9322dde93f4a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2382.280962] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e21ce01-3c2f-40fc-b4d5-750344e2a210 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.344835] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 587edf89-2ea0-4b89-8830-fa766b798398] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2382.347042] env[62684]: INFO nova.compute.manager [None req-5faaf3ec-084e-4d4f-bf62-70d4e9fd6c5e tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Detaching volume a930b2fe-6aed-4e27-967f-d8cfdc8a6d1b [ 2382.380433] env[62684]: INFO nova.virt.block_device [None req-5faaf3ec-084e-4d4f-bf62-70d4e9fd6c5e tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Attempting to driver detach volume a930b2fe-6aed-4e27-967f-d8cfdc8a6d1b from mountpoint /dev/sdb [ 2382.380693] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-5faaf3ec-084e-4d4f-bf62-70d4e9fd6c5e tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Volume detach. 
Driver type: vmdk {{(pid=62684) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2382.380889] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-5faaf3ec-084e-4d4f-bf62-70d4e9fd6c5e tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421415', 'volume_id': 'a930b2fe-6aed-4e27-967f-d8cfdc8a6d1b', 'name': 'volume-a930b2fe-6aed-4e27-967f-d8cfdc8a6d1b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5b3668f3-219d-4304-bc9e-9b911762085d', 'attached_at': '', 'detached_at': '', 'volume_id': 'a930b2fe-6aed-4e27-967f-d8cfdc8a6d1b', 'serial': 'a930b2fe-6aed-4e27-967f-d8cfdc8a6d1b'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2382.381876] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e3c2190-874e-4e8d-8dd4-209ca878912b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.403632] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a57480-88c7-4d2d-b9b0-ea16ad22a970 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.410384] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7e7f77f-bba8-476b-8034-dac4c9e7db62 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.429640] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0aefc61-5486-46fc-a9a9-fe91d789ff27 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.433395] env[62684]: DEBUG nova.compute.utils [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2382.444488] env[62684]: DEBUG nova.compute.manager [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2382.444656] env[62684]: DEBUG nova.network.neutron [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2382.447133] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-5faaf3ec-084e-4d4f-bf62-70d4e9fd6c5e tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] The volume has not been displaced from its original location: [datastore2] volume-a930b2fe-6aed-4e27-967f-d8cfdc8a6d1b/volume-a930b2fe-6aed-4e27-967f-d8cfdc8a6d1b.vmdk. No consolidation needed. 
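
The _detach_volume_vmdk entry above prints the connection_info dict that drives the detach: driver_volume_type 'vmdk' plus a data block holding the backing object, volume_id, name and access_mode. Below is a small defensive-reading sketch over that same structure; the field names are copied from the log, but the helper itself is hypothetical and not Nova code.

    def summarize_vmdk_connection(connection_info: dict) -> str:
        """Return a one-line summary of a 'vmdk' connection_info dict."""
        if connection_info.get("driver_volume_type") != "vmdk":
            raise ValueError("not a vmdk-backed volume")
        data = connection_info.get("data", {})
        return (f"volume {data.get('volume_id')} "
                f"({data.get('name')}, access_mode={data.get('access_mode')}, "
                f"encrypted={data.get('encrypted', False)})")

    example = {
        "driver_volume_type": "vmdk",
        "data": {
            "volume": "vm-421415",
            "volume_id": "a930b2fe-6aed-4e27-967f-d8cfdc8a6d1b",
            "name": "volume-a930b2fe-6aed-4e27-967f-d8cfdc8a6d1b",
            "access_mode": "rw",
            "encrypted": False,
        },
    }
    print(summarize_vmdk_connection(example))
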
{{(pid=62684) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2382.452401] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-5faaf3ec-084e-4d4f-bf62-70d4e9fd6c5e tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Reconfiguring VM instance instance-0000006c to detach disk 2001 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2382.453254] env[62684]: DEBUG nova.compute.manager [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2382.456064] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b30d51c7-8f03-4db3-a9cb-8b6fb800370f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.475743] env[62684]: DEBUG oslo_vmware.api [None req-5faaf3ec-084e-4d4f-bf62-70d4e9fd6c5e tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2382.475743] env[62684]: value = "task-2053847" [ 2382.475743] env[62684]: _type = "Task" [ 2382.475743] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2382.484373] env[62684]: DEBUG oslo_vmware.api [None req-5faaf3ec-084e-4d4f-bf62-70d4e9fd6c5e tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053847, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2382.513875] env[62684]: DEBUG nova.policy [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2fab3230b61d440e93d1d0a975115405', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '27d04006afc747e19ad87238bfdbaad1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2382.720327] env[62684]: DEBUG oslo_vmware.api [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053846, 'name': ReconfigVM_Task, 'duration_secs': 0.264464} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2382.720614] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Reconfigured VM instance instance-00000070 to attach disk [datastore1] b88d9418-7e90-473e-bd9a-18bc398faad0/b88d9418-7e90-473e-bd9a-18bc398faad0.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2382.720903] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Updating instance 'b88d9418-7e90-473e-bd9a-18bc398faad0' progress to 50 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2382.776760] env[62684]: DEBUG nova.network.neutron [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Successfully created port: 83b1b299-d863-45f1-9a11-2ffa0e2bd291 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2382.792940] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d2f90531-f272-42c0-be05-9322dde93f4a tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.546s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2382.850217] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 58e67d8e-900e-4d22-a4fd-fe493758d4f2] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2382.970518] env[62684]: INFO nova.virt.block_device [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Booting with volume c148a6a9-f313-4a0d-8466-261a9903a3c8 at /dev/sda [ 2382.992198] env[62684]: DEBUG oslo_vmware.api [None req-5faaf3ec-084e-4d4f-bf62-70d4e9fd6c5e tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053847, 'name': ReconfigVM_Task, 'duration_secs': 0.221682} completed successfully. 
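
The oslo_concurrency.lockutils lines throughout this section report, for each named lock, how long the caller waited to acquire it and how long it was held (for example, the "ea4d273e-..." lock above was held 1.546s around do_stop_instance). A stdlib-only sketch of a context manager that records those two durations follows; it mimics the log format but is not oslo.concurrency's implementation.

    import threading, time
    from contextlib import contextmanager

    _locks = {}  # name -> threading.Lock (sketch only; not itself thread-safe setup)

    @contextmanager
    def timed_lock(name: str):
        """Acquire a named lock and report wait/held times, lockutils-style."""
        lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
        try:
            yield
        finally:
            held = time.monotonic() - (t0 + waited)
            lock.release()
            print(f'Lock "{name}" released :: held {held:.3f}s')

    with timed_lock("compute_resources"):
        time.sleep(0.05)  # stand-in for the critical section
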
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2382.992499] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-5faaf3ec-084e-4d4f-bf62-70d4e9fd6c5e tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Reconfigured VM instance instance-0000006c to detach disk 2001 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2383.001844] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c5040590-1fc1-4f51-8623-a484dcaf7487 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.013038] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9831c466-6a50-4aa2-bb52-7a3f7344de41 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.022577] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f33a7da8-7f81-4084-ad94-05ff434d4bef {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.033707] env[62684]: DEBUG oslo_vmware.api [None req-5faaf3ec-084e-4d4f-bf62-70d4e9fd6c5e tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2383.033707] env[62684]: value = "task-2053848" [ 2383.033707] env[62684]: _type = "Task" [ 2383.033707] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2383.041855] env[62684]: DEBUG oslo_vmware.api [None req-5faaf3ec-084e-4d4f-bf62-70d4e9fd6c5e tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053848, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2383.054440] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b4fe2ffe-4b60-41db-bc10-65a2f8082e68 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.064125] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43fd1f16-3507-4976-8621-4dc6b9938c28 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.093526] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c7f6e6-544f-4ccc-bb6c-8771dbf5be83 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.100151] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e8e85a6-84c4-43dd-8490-2fee35010e0a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.115425] env[62684]: DEBUG nova.virt.block_device [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Updating existing volume attachment record: 1ce0aa6b-86b2-4b40-b58a-80f21d0e8378 {{(pid=62684) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2383.227646] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b53344-6e63-4cdb-87e6-7673d1a79fda {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.248288] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca129761-b1dc-4050-986d-631c1f96a344 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.265811] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Updating instance 'b88d9418-7e90-473e-bd9a-18bc398faad0' progress to 67 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2383.353464] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 3ff55331-6d5c-4558-b932-e266670f2ac9] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2383.543955] env[62684]: DEBUG oslo_vmware.api [None req-5faaf3ec-084e-4d4f-bf62-70d4e9fd6c5e tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053848, 'name': ReconfigVM_Task, 'duration_secs': 0.141471} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2383.544242] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-5faaf3ec-084e-4d4f-bf62-70d4e9fd6c5e tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421415', 'volume_id': 'a930b2fe-6aed-4e27-967f-d8cfdc8a6d1b', 'name': 'volume-a930b2fe-6aed-4e27-967f-d8cfdc8a6d1b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5b3668f3-219d-4304-bc9e-9b911762085d', 'attached_at': '', 'detached_at': '', 'volume_id': 'a930b2fe-6aed-4e27-967f-d8cfdc8a6d1b', 'serial': 'a930b2fe-6aed-4e27-967f-d8cfdc8a6d1b'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2383.776249] env[62684]: DEBUG nova.objects.instance [None req-d0c2c88e-7a42-49a4-91d1-ae22a314cb7c tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lazy-loading 'flavor' on Instance uuid ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2383.809591] env[62684]: DEBUG nova.network.neutron [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Port 7e4b9e76-bf05-4ee7-b25c-922484094be0 binding to destination host cpu-1 is already ACTIVE {{(pid=62684) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2383.856179] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: ba12fa9a-10e3-4624-98b5-4ff7365e1940] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2384.087102] env[62684]: DEBUG nova.objects.instance [None req-5faaf3ec-084e-4d4f-bf62-70d4e9fd6c5e tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lazy-loading 'flavor' on Instance uuid 5b3668f3-219d-4304-bc9e-9b911762085d {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2384.153740] env[62684]: DEBUG nova.compute.manager [req-6e3d2fed-cbbd-4924-b27e-210c1f826344 req-7960444a-8345-47f8-8445-eb2ea68f27cf service nova] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Received event network-vif-plugged-83b1b299-d863-45f1-9a11-2ffa0e2bd291 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2384.153980] env[62684]: DEBUG oslo_concurrency.lockutils [req-6e3d2fed-cbbd-4924-b27e-210c1f826344 req-7960444a-8345-47f8-8445-eb2ea68f27cf service nova] Acquiring lock "8d22d555-f837-4eb3-9474-c1434649584e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2384.154217] env[62684]: DEBUG oslo_concurrency.lockutils [req-6e3d2fed-cbbd-4924-b27e-210c1f826344 req-7960444a-8345-47f8-8445-eb2ea68f27cf service nova] Lock "8d22d555-f837-4eb3-9474-c1434649584e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2384.154393] env[62684]: DEBUG oslo_concurrency.lockutils [req-6e3d2fed-cbbd-4924-b27e-210c1f826344 req-7960444a-8345-47f8-8445-eb2ea68f27cf service nova] Lock "8d22d555-f837-4eb3-9474-c1434649584e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2384.154564] env[62684]: DEBUG nova.compute.manager [req-6e3d2fed-cbbd-4924-b27e-210c1f826344 req-7960444a-8345-47f8-8445-eb2ea68f27cf service nova] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] No waiting events found dispatching network-vif-plugged-83b1b299-d863-45f1-9a11-2ffa0e2bd291 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2384.154748] env[62684]: WARNING nova.compute.manager [req-6e3d2fed-cbbd-4924-b27e-210c1f826344 req-7960444a-8345-47f8-8445-eb2ea68f27cf service nova] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Received unexpected event network-vif-plugged-83b1b299-d863-45f1-9a11-2ffa0e2bd291 for instance with vm_state building and task_state block_device_mapping. [ 2384.247231] env[62684]: DEBUG nova.network.neutron [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Successfully updated port: 83b1b299-d863-45f1-9a11-2ffa0e2bd291 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2384.281589] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d0c2c88e-7a42-49a4-91d1-ae22a314cb7c tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquiring lock "refresh_cache-ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2384.281791] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d0c2c88e-7a42-49a4-91d1-ae22a314cb7c tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquired lock "refresh_cache-ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2384.282065] env[62684]: DEBUG nova.network.neutron [None req-d0c2c88e-7a42-49a4-91d1-ae22a314cb7c tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2384.282329] env[62684]: DEBUG nova.objects.instance [None req-d0c2c88e-7a42-49a4-91d1-ae22a314cb7c tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lazy-loading 'info_cache' on Instance uuid ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2384.359742] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 264c6900-dbef-455e-95cc-1df73c735cc8] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2384.752547] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 
tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "refresh_cache-8d22d555-f837-4eb3-9474-c1434649584e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2384.752547] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquired lock "refresh_cache-8d22d555-f837-4eb3-9474-c1434649584e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2384.752547] env[62684]: DEBUG nova.network.neutron [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2384.785135] env[62684]: DEBUG nova.objects.base [None req-d0c2c88e-7a42-49a4-91d1-ae22a314cb7c tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=62684) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2384.825389] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "b88d9418-7e90-473e-bd9a-18bc398faad0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2384.825622] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "b88d9418-7e90-473e-bd9a-18bc398faad0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2384.825798] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "b88d9418-7e90-473e-bd9a-18bc398faad0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2384.862759] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: f4fab142-8066-43c1-abaa-a9f66775114c] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2385.094158] env[62684]: DEBUG oslo_concurrency.lockutils [None req-5faaf3ec-084e-4d4f-bf62-70d4e9fd6c5e tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "5b3668f3-219d-4304-bc9e-9b911762085d" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.251s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2385.196993] env[62684]: DEBUG nova.compute.manager [None req-8b218f01-95d8-469a-ac61-be5112a899e8 
tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2385.197590] env[62684]: DEBUG nova.virt.hardware [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2385.197816] env[62684]: DEBUG nova.virt.hardware [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2385.197992] env[62684]: DEBUG nova.virt.hardware [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2385.198206] env[62684]: DEBUG nova.virt.hardware [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2385.198361] env[62684]: DEBUG nova.virt.hardware [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2385.198517] env[62684]: DEBUG nova.virt.hardware [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2385.198724] env[62684]: DEBUG nova.virt.hardware [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2385.198890] env[62684]: DEBUG nova.virt.hardware [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2385.199112] env[62684]: DEBUG nova.virt.hardware [None 
req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2385.199340] env[62684]: DEBUG nova.virt.hardware [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2385.199543] env[62684]: DEBUG nova.virt.hardware [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2385.200444] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ddf296-b263-4534-8a48-4e4f74f43d0f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.208580] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba43c7d-2086-4589-a937-e31dd226274b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.283277] env[62684]: DEBUG nova.network.neutron [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Instance cache missing network info. 
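
The instance_info_cache updates recorded just below carry the full network_info structure for the newly bound port: VIF id, MAC address, bridge, subnets with fixed IPs, and the OVS/NSX binding details. A short sketch of pulling the fixed addresses out of such a structure; the nested layout mirrors the log, while the helper function is hypothetical.

    def fixed_ips(network_info: list) -> list:
        """Collect all fixed IP addresses from a Nova-style network_info list."""
        addresses = []
        for vif in network_info:
            for subnet in vif.get("network", {}).get("subnets", []):
                for ip in subnet.get("ips", []):
                    if ip.get("type") == "fixed":
                        addresses.append(ip["address"])
        return addresses

    network_info = [{
        "id": "83b1b299-d863-45f1-9a11-2ffa0e2bd291",
        "address": "fa:16:3e:4f:09:7f",
        "network": {
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.12", "type": "fixed"}],
            }],
        },
    }]
    print(fixed_ips(network_info))  # ['192.168.128.12']
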
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2385.366250] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 99a9653c-7221-4495-be5f-5441dc8da0f4] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2385.407647] env[62684]: DEBUG nova.network.neutron [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Updating instance_info_cache with network_info: [{"id": "83b1b299-d863-45f1-9a11-2ffa0e2bd291", "address": "fa:16:3e:4f:09:7f", "network": {"id": "e177c6d0-ddd5-4029-94af-c8f1b937dd9f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1344612161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27d04006afc747e19ad87238bfdbaad1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83b1b299-d8", "ovs_interfaceid": "83b1b299-d863-45f1-9a11-2ffa0e2bd291", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2385.513550] env[62684]: DEBUG nova.network.neutron [None req-d0c2c88e-7a42-49a4-91d1-ae22a314cb7c tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Updating instance_info_cache with network_info: [{"id": "b5464c95-056f-4730-8974-808275aae0f2", "address": "fa:16:3e:db:88:25", "network": {"id": "0b2eba7d-f2f8-4db1-b935-e34edffa47b8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-335916832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c84f03a79784aa38a063e874dcc2c0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13d625c9-77ec-4edb-a56b-9f37a314cc39", "external-id": "nsx-vlan-transportzone-358", "segmentation_id": 358, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5464c95-05", "ovs_interfaceid": "b5464c95-056f-4730-8974-808275aae0f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2385.866908] 
env[62684]: DEBUG oslo_concurrency.lockutils [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "refresh_cache-b88d9418-7e90-473e-bd9a-18bc398faad0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2385.867246] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired lock "refresh_cache-b88d9418-7e90-473e-bd9a-18bc398faad0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2385.867475] env[62684]: DEBUG nova.network.neutron [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2385.870830] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 584845d2-d146-42bf-8ef5-58532fe24f65] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2385.910351] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Releasing lock "refresh_cache-8d22d555-f837-4eb3-9474-c1434649584e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2385.910658] env[62684]: DEBUG nova.compute.manager [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Instance network_info: |[{"id": "83b1b299-d863-45f1-9a11-2ffa0e2bd291", "address": "fa:16:3e:4f:09:7f", "network": {"id": "e177c6d0-ddd5-4029-94af-c8f1b937dd9f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1344612161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27d04006afc747e19ad87238bfdbaad1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83b1b299-d8", "ovs_interfaceid": "83b1b299-d863-45f1-9a11-2ffa0e2bd291", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2385.911085] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 
8d22d555-f837-4eb3-9474-c1434649584e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4f:09:7f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '171aeae0-6a27-44fc-bc3d-a2d5581fc702', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '83b1b299-d863-45f1-9a11-2ffa0e2bd291', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2385.918382] env[62684]: DEBUG oslo.service.loopingcall [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2385.918585] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2385.918811] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-98261dd0-8bae-4d67-a55d-c9f3f4d1403f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.939046] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2385.939046] env[62684]: value = "task-2053849" [ 2385.939046] env[62684]: _type = "Task" [ 2385.939046] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2385.946673] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053849, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2386.017027] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d0c2c88e-7a42-49a4-91d1-ae22a314cb7c tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Releasing lock "refresh_cache-ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2386.089614] env[62684]: DEBUG oslo_concurrency.lockutils [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "5b3668f3-219d-4304-bc9e-9b911762085d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2386.089883] env[62684]: DEBUG oslo_concurrency.lockutils [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "5b3668f3-219d-4304-bc9e-9b911762085d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2386.090122] env[62684]: DEBUG oslo_concurrency.lockutils [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "5b3668f3-219d-4304-bc9e-9b911762085d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2386.090317] env[62684]: DEBUG oslo_concurrency.lockutils [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "5b3668f3-219d-4304-bc9e-9b911762085d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2386.090493] env[62684]: DEBUG oslo_concurrency.lockutils [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "5b3668f3-219d-4304-bc9e-9b911762085d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2386.092614] env[62684]: INFO nova.compute.manager [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Terminating instance [ 2386.094350] env[62684]: DEBUG nova.compute.manager [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2386.094553] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2386.095381] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c18a0eb-604a-485c-93e6-02b38d180bbd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.103351] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2386.103546] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9701d6a5-b4e3-4362-bf69-1ecd2947b1b6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.109838] env[62684]: DEBUG oslo_vmware.api [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2386.109838] env[62684]: value = "task-2053850" [ 2386.109838] env[62684]: _type = "Task" [ 2386.109838] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2386.117818] env[62684]: DEBUG oslo_vmware.api [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053850, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2386.179598] env[62684]: DEBUG nova.compute.manager [req-db569d45-e46d-4e45-bcc2-07da3fedfe0f req-22359745-93da-4a6e-b9f5-8bfdc1881e60 service nova] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Received event network-changed-83b1b299-d863-45f1-9a11-2ffa0e2bd291 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2386.179904] env[62684]: DEBUG nova.compute.manager [req-db569d45-e46d-4e45-bcc2-07da3fedfe0f req-22359745-93da-4a6e-b9f5-8bfdc1881e60 service nova] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Refreshing instance network info cache due to event network-changed-83b1b299-d863-45f1-9a11-2ffa0e2bd291. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2386.180353] env[62684]: DEBUG oslo_concurrency.lockutils [req-db569d45-e46d-4e45-bcc2-07da3fedfe0f req-22359745-93da-4a6e-b9f5-8bfdc1881e60 service nova] Acquiring lock "refresh_cache-8d22d555-f837-4eb3-9474-c1434649584e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2386.180584] env[62684]: DEBUG oslo_concurrency.lockutils [req-db569d45-e46d-4e45-bcc2-07da3fedfe0f req-22359745-93da-4a6e-b9f5-8bfdc1881e60 service nova] Acquired lock "refresh_cache-8d22d555-f837-4eb3-9474-c1434649584e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2386.180828] env[62684]: DEBUG nova.network.neutron [req-db569d45-e46d-4e45-bcc2-07da3fedfe0f req-22359745-93da-4a6e-b9f5-8bfdc1881e60 service nova] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Refreshing network info cache for port 83b1b299-d863-45f1-9a11-2ffa0e2bd291 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2386.374377] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: d7f09d0e-f7b6-415e-8d82-47eba1153aa1] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2386.451491] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053849, 'name': CreateVM_Task, 'duration_secs': 0.405322} completed successfully. 
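
The CreateVM_Task that completes here belongs to a boot-from-volume instance: the block device information printed just after shows no image or ephemeral disks, only a single mapping with boot_index 0 mounted at /dev/sda. A tiny sketch of picking the root mapping out of such a structure (the helper is hypothetical; the dict shape follows the log):

    def root_device(block_device_info: dict):
        """Return (mount_device, volume_id) for the boot_index 0 mapping, if any."""
        for bdm in block_device_info.get("block_device_mapping", []):
            if bdm.get("boot_index") == 0:
                data = bdm.get("connection_info", {}).get("data", {})
                return bdm.get("mount_device"), data.get("volume_id")
        return None

    info = {
        "root_device_name": "/dev/sda",
        "image": [],
        "block_device_mapping": [{
            "boot_index": 0,
            "mount_device": "/dev/sda",
            "connection_info": {
                "driver_volume_type": "vmdk",
                "data": {"volume_id": "c148a6a9-f313-4a0d-8466-261a9903a3c8"},
            },
        }],
    }
    print(root_device(info))  # ('/dev/sda', 'c148a6a9-f313-4a0d-8466-261a9903a3c8')
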
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2386.451673] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2386.452491] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'disk_bus': None, 'boot_index': 0, 'device_type': None, 'attachment_id': '1ce0aa6b-86b2-4b40-b58a-80f21d0e8378', 'guest_format': None, 'mount_device': '/dev/sda', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421420', 'volume_id': 'c148a6a9-f313-4a0d-8466-261a9903a3c8', 'name': 'volume-c148a6a9-f313-4a0d-8466-261a9903a3c8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8d22d555-f837-4eb3-9474-c1434649584e', 'attached_at': '', 'detached_at': '', 'volume_id': 'c148a6a9-f313-4a0d-8466-261a9903a3c8', 'serial': 'c148a6a9-f313-4a0d-8466-261a9903a3c8'}, 'volume_type': None}], 'swap': None} {{(pid=62684) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 2386.452740] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Root volume attach. Driver type: vmdk {{(pid=62684) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 2386.453612] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6921bc5e-1ae9-46af-b87c-6e7051cc63d4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.461084] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a60cf8b0-4251-49b0-9519-222020b830aa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.467514] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131cdfc5-3337-4470-a388-615587d5add6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.474059] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-88717e86-e788-42c6-8c46-cdf8f53de715 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.480475] env[62684]: DEBUG oslo_vmware.api [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2386.480475] env[62684]: value = "task-2053851" [ 2386.480475] env[62684]: _type = "Task" [ 2386.480475] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2386.489357] env[62684]: DEBUG oslo_vmware.api [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053851, 'name': RelocateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2386.520724] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0c2c88e-7a42-49a4-91d1-ae22a314cb7c tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2386.521066] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5bada8c3-0f94-4376-8254-0df6069054a1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.529584] env[62684]: DEBUG oslo_vmware.api [None req-d0c2c88e-7a42-49a4-91d1-ae22a314cb7c tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2386.529584] env[62684]: value = "task-2053852" [ 2386.529584] env[62684]: _type = "Task" [ 2386.529584] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2386.537376] env[62684]: DEBUG oslo_vmware.api [None req-d0c2c88e-7a42-49a4-91d1-ae22a314cb7c tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053852, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2386.619729] env[62684]: DEBUG oslo_vmware.api [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053850, 'name': PowerOffVM_Task, 'duration_secs': 0.225297} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2386.620015] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2386.620215] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2386.620488] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-32399b20-25e2-42ae-90b4-1573831783f8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.637629] env[62684]: DEBUG nova.network.neutron [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Updating instance_info_cache with network_info: [{"id": "7e4b9e76-bf05-4ee7-b25c-922484094be0", "address": "fa:16:3e:03:9c:84", "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-120070593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "263c101fcc5e493789b79dfd1ba97cc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e4b9e76-bf", "ovs_interfaceid": "7e4b9e76-bf05-4ee7-b25c-922484094be0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2386.724194] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2386.724455] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2386.724644] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Deleting the datastore file [datastore1] 5b3668f3-219d-4304-bc9e-9b911762085d {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2386.724949] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2639c993-ef29-489e-9723-5d7029ecd919 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.733838] env[62684]: DEBUG oslo_vmware.api [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2386.733838] env[62684]: value = "task-2053854" [ 2386.733838] env[62684]: _type = "Task" [ 2386.733838] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2386.745252] env[62684]: DEBUG oslo_vmware.api [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053854, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2386.877460] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 7d495b9d-b4c8-4e7b-8be7-96e47b4a6dd5] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2386.952185] env[62684]: DEBUG nova.network.neutron [req-db569d45-e46d-4e45-bcc2-07da3fedfe0f req-22359745-93da-4a6e-b9f5-8bfdc1881e60 service nova] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Updated VIF entry in instance network info cache for port 83b1b299-d863-45f1-9a11-2ffa0e2bd291. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2386.952639] env[62684]: DEBUG nova.network.neutron [req-db569d45-e46d-4e45-bcc2-07da3fedfe0f req-22359745-93da-4a6e-b9f5-8bfdc1881e60 service nova] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Updating instance_info_cache with network_info: [{"id": "83b1b299-d863-45f1-9a11-2ffa0e2bd291", "address": "fa:16:3e:4f:09:7f", "network": {"id": "e177c6d0-ddd5-4029-94af-c8f1b937dd9f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1344612161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27d04006afc747e19ad87238bfdbaad1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83b1b299-d8", "ovs_interfaceid": "83b1b299-d863-45f1-9a11-2ffa0e2bd291", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2386.992886] env[62684]: DEBUG oslo_vmware.api [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053851, 'name': RelocateVM_Task} progress is 42%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2387.045446] env[62684]: DEBUG oslo_vmware.api [None req-d0c2c88e-7a42-49a4-91d1-ae22a314cb7c tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053852, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2387.142269] env[62684]: DEBUG oslo_concurrency.lockutils [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Releasing lock "refresh_cache-b88d9418-7e90-473e-bd9a-18bc398faad0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2387.245593] env[62684]: DEBUG oslo_vmware.api [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053854, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164177} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2387.245873] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2387.246069] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2387.246270] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2387.246452] env[62684]: INFO nova.compute.manager [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Took 1.15 seconds to destroy the instance on the hypervisor. [ 2387.246700] env[62684]: DEBUG oslo.service.loopingcall [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2387.246909] env[62684]: DEBUG nova.compute.manager [-] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2387.247026] env[62684]: DEBUG nova.network.neutron [-] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2387.380901] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 7bb3265d-68a5-4cae-b1d3-fcbc980e2bdd] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2387.455986] env[62684]: DEBUG oslo_concurrency.lockutils [req-db569d45-e46d-4e45-bcc2-07da3fedfe0f req-22359745-93da-4a6e-b9f5-8bfdc1881e60 service nova] Releasing lock "refresh_cache-8d22d555-f837-4eb3-9474-c1434649584e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2387.495576] env[62684]: DEBUG oslo_vmware.api [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053851, 'name': RelocateVM_Task} progress is 54%.
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2387.543740] env[62684]: DEBUG oslo_vmware.api [None req-d0c2c88e-7a42-49a4-91d1-ae22a314cb7c tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053852, 'name': PowerOnVM_Task, 'duration_secs': 0.952098} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2387.543941] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0c2c88e-7a42-49a4-91d1-ae22a314cb7c tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2387.544166] env[62684]: DEBUG nova.compute.manager [None req-d0c2c88e-7a42-49a4-91d1-ae22a314cb7c tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2387.545068] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9679f450-bbb9-4572-a99e-ec90162c0084 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.666276] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aacf7b6d-970e-46ed-8107-1e0e7829b71c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.694041] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a5698bf-7191-48d5-aa95-afa5ccd78a78 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.703642] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Updating instance 'b88d9418-7e90-473e-bd9a-18bc398faad0' progress to 83 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2387.833433] env[62684]: DEBUG nova.compute.manager [req-0ea05fbd-cfdf-4153-8be8-49c3fb6e2da2 req-284fa242-5f97-4cae-8a92-bcf259ca5786 service nova] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Received event network-vif-deleted-adc3c1c4-6d99-419c-b176-d3f75d6a908c {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2387.833433] env[62684]: INFO nova.compute.manager [req-0ea05fbd-cfdf-4153-8be8-49c3fb6e2da2 req-284fa242-5f97-4cae-8a92-bcf259ca5786 service nova] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Neutron deleted interface adc3c1c4-6d99-419c-b176-d3f75d6a908c; detaching it from the instance and deleting it from the info cache [ 2387.833433] env[62684]: DEBUG nova.network.neutron [req-0ea05fbd-cfdf-4153-8be8-49c3fb6e2da2 req-284fa242-5f97-4cae-8a92-bcf259ca5786 service nova] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2387.884587] env[62684]: DEBUG nova.compute.manager [None 
req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: aebbc2cc-8973-4907-9ec8-085027fd7ca3] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2387.993762] env[62684]: DEBUG oslo_vmware.api [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053851, 'name': RelocateVM_Task} progress is 69%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2388.213072] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2388.213897] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-09bf848b-01d5-40d9-b173-1cddc004a3ad {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2388.227965] env[62684]: DEBUG oslo_vmware.api [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2388.227965] env[62684]: value = "task-2053855" [ 2388.227965] env[62684]: _type = "Task" [ 2388.227965] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2388.239373] env[62684]: DEBUG oslo_vmware.api [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053855, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2388.307161] env[62684]: DEBUG nova.network.neutron [-] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2388.335726] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1f417871-f009-40b4-ac10-67fc82ab2073 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2388.349781] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4977d3d9-79eb-4dcd-afae-452b76536f2c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2388.385939] env[62684]: DEBUG nova.compute.manager [req-0ea05fbd-cfdf-4153-8be8-49c3fb6e2da2 req-284fa242-5f97-4cae-8a92-bcf259ca5786 service nova] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Detach interface failed, port_id=adc3c1c4-6d99-419c-b176-d3f75d6a908c, reason: Instance 5b3668f3-219d-4304-bc9e-9b911762085d could not be found. 
{{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2388.388754] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 9418b42d-9fff-41fd-92d1-a832017fc9c3] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2388.502246] env[62684]: DEBUG oslo_vmware.api [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053851, 'name': RelocateVM_Task} progress is 82%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2388.741749] env[62684]: DEBUG oslo_vmware.api [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053855, 'name': PowerOnVM_Task, 'duration_secs': 0.494331} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2388.741749] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2388.741902] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9a820ccd-7eb7-4712-898e-d685de418ca5 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Updating instance 'b88d9418-7e90-473e-bd9a-18bc398faad0' progress to 100 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2388.809816] env[62684]: INFO nova.compute.manager [-] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Took 1.56 seconds to deallocate network for instance. [ 2388.892877] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: f037d6b2-2082-4611-985e-b9a077eb8250] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2388.995791] env[62684]: DEBUG oslo_vmware.api [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053851, 'name': RelocateVM_Task} progress is 95%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2389.316682] env[62684]: DEBUG oslo_concurrency.lockutils [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2389.316981] env[62684]: DEBUG oslo_concurrency.lockutils [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2389.317232] env[62684]: DEBUG nova.objects.instance [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lazy-loading 'resources' on Instance uuid 5b3668f3-219d-4304-bc9e-9b911762085d {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2389.396584] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 0a8d7c48-cf90-4baf-a900-38fbd62869a6] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2389.494998] env[62684]: DEBUG oslo_vmware.api [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053851, 'name': RelocateVM_Task} progress is 97%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2389.900609] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 6b461482-0606-4af3-98a2-88c0318d1a69] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2389.946753] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f06ed32a-c4a0-452a-995b-b31ed9f0b07f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2389.954835] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc7aacac-9ca2-4b03-88c4-22d9b48e34d6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2389.985765] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7397266-d3af-42be-9ebb-6bc43b06c096 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2389.998888] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05991dea-4150-4e0c-80ec-c24b43e4fef5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2390.002506] env[62684]: DEBUG oslo_vmware.api [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053851, 'name': RelocateVM_Task} progress is 98%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2390.012272] env[62684]: DEBUG nova.compute.provider_tree [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2390.013449] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquiring lock "7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2390.013678] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2390.013879] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquiring lock "7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62684) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2390.014080] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2390.014250] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2390.015963] env[62684]: INFO nova.compute.manager [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Terminating instance [ 2390.017717] env[62684]: DEBUG nova.compute.manager [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2390.017912] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2390.018649] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ca5bf0b-9bba-491e-8e83-9ed086a0ece4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2390.025892] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2390.026126] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7dd8e2d1-8c32-4ca1-befb-5069a664c07a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2390.031779] env[62684]: DEBUG oslo_vmware.api [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2390.031779] env[62684]: value = "task-2053856" [ 2390.031779] env[62684]: _type = "Task" [ 2390.031779] env[62684]: } to complete.
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2390.040366] env[62684]: DEBUG oslo_vmware.api [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053856, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2390.403582] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 2eab4a07-9b92-436e-b4f8-fa64ae949b56] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2390.498386] env[62684]: DEBUG oslo_vmware.api [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053851, 'name': RelocateVM_Task} progress is 98%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2390.517480] env[62684]: DEBUG nova.scheduler.client.report [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2390.541947] env[62684]: DEBUG oslo_vmware.api [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053856, 'name': PowerOffVM_Task, 'duration_secs': 0.418253} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2390.542214] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2390.542406] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2390.542700] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7dd501f3-bc66-4169-9707-47c55d65ef8a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2390.696036] env[62684]: DEBUG nova.network.neutron [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Port 7e4b9e76-bf05-4ee7-b25c-922484094be0 binding to destination host cpu-1 is already ACTIVE {{(pid=62684) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2390.696371] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "refresh_cache-b88d9418-7e90-473e-bd9a-18bc398faad0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2390.696555] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired lock "refresh_cache-b88d9418-7e90-473e-bd9a-18bc398faad0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2390.696747] env[62684]: DEBUG nova.network.neutron [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2390.907182] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 3a967adf-8c46-4787-b1d1-4ed701399576] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2390.998127] env[62684]: DEBUG oslo_vmware.api [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053851, 'name': RelocateVM_Task, 'duration_secs': 4.158296} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2390.998379] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Volume attach. Driver type: vmdk {{(pid=62684) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2390.998590] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421420', 'volume_id': 'c148a6a9-f313-4a0d-8466-261a9903a3c8', 'name': 'volume-c148a6a9-f313-4a0d-8466-261a9903a3c8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8d22d555-f837-4eb3-9474-c1434649584e', 'attached_at': '', 'detached_at': '', 'volume_id': 'c148a6a9-f313-4a0d-8466-261a9903a3c8', 'serial': 'c148a6a9-f313-4a0d-8466-261a9903a3c8'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2390.999399] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5683662c-955b-4634-86b1-1a26180573ba {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.014269] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d947b7ad-0d36-474e-9335-cba092750c13 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.027159] env[62684]: DEBUG oslo_concurrency.lockutils [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.710s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2391.037153] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] volume-c148a6a9-f313-4a0d-8466-261a9903a3c8/volume-c148a6a9-f313-4a0d-8466-261a9903a3c8.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2391.038029] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0c9045c-bee2-48ea-b6ee-ad359649ec5a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.053223] env[62684]: INFO nova.scheduler.client.report [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Deleted allocations for instance 5b3668f3-219d-4304-bc9e-9b911762085d [ 2391.057849] env[62684]: DEBUG oslo_vmware.api [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 
tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2391.057849] env[62684]: value = "task-2053858" [ 2391.057849] env[62684]: _type = "Task" [ 2391.057849] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2391.066184] env[62684]: DEBUG oslo_vmware.api [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053858, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2391.410412] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: e8c90faa-2c25-4308-9781-80d308b9211c] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2391.450756] env[62684]: DEBUG nova.network.neutron [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Updating instance_info_cache with network_info: [{"id": "7e4b9e76-bf05-4ee7-b25c-922484094be0", "address": "fa:16:3e:03:9c:84", "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-120070593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "263c101fcc5e493789b79dfd1ba97cc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e4b9e76-bf", "ovs_interfaceid": "7e4b9e76-bf05-4ee7-b25c-922484094be0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2391.566025] env[62684]: DEBUG oslo_concurrency.lockutils [None req-89ea5ece-b6d6-4904-8c37-6c8ffc2033fd tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "5b3668f3-219d-4304-bc9e-9b911762085d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.476s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2391.573141] env[62684]: DEBUG oslo_vmware.api [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053858, 'name': ReconfigVM_Task} progress is 14%.
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2391.914214] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 8cc68353-4678-4ee7-8c0d-3d71e6bf05bf] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2391.953662] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Releasing lock "refresh_cache-b88d9418-7e90-473e-bd9a-18bc398faad0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2392.074020] env[62684]: DEBUG oslo_vmware.api [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053858, 'name': ReconfigVM_Task, 'duration_secs': 0.774732} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2392.074324] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Reconfigured VM instance instance-00000076 to attach disk [datastore1] volume-c148a6a9-f313-4a0d-8466-261a9903a3c8/volume-c148a6a9-f313-4a0d-8466-261a9903a3c8.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2392.078927] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-219f1762-6e7e-403a-b385-0d5c87b79395 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.093594] env[62684]: DEBUG oslo_vmware.api [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2392.093594] env[62684]: value = "task-2053859" [ 2392.093594] env[62684]: _type = "Task" [ 2392.093594] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2392.101148] env[62684]: DEBUG oslo_vmware.api [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053859, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2392.417522] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 28886f7c-6efc-4505-84f6-682d75cea215] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2392.456851] env[62684]: DEBUG nova.compute.manager [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62684) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 2392.457100] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2392.457338] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2392.604267] env[62684]: DEBUG oslo_vmware.api [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053859, 'name': ReconfigVM_Task, 'duration_secs': 0.120168} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2392.604548] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421420', 'volume_id': 'c148a6a9-f313-4a0d-8466-261a9903a3c8', 'name': 'volume-c148a6a9-f313-4a0d-8466-261a9903a3c8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8d22d555-f837-4eb3-9474-c1434649584e', 'attached_at': '', 'detached_at': '', 'volume_id': 'c148a6a9-f313-4a0d-8466-261a9903a3c8', 'serial': 'c148a6a9-f313-4a0d-8466-261a9903a3c8'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2392.605113] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cce1bc7b-68a7-4e2b-8746-837ae2a7cf00 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.611796] env[62684]: DEBUG oslo_vmware.api [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2392.611796] env[62684]: value = "task-2053860" [ 2392.611796] env[62684]: _type = "Task" [ 2392.611796] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2392.619635] env[62684]: DEBUG oslo_vmware.api [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053860, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2392.920820] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: daf1486b-d5c2-4341-8a27-36eeeb08cd26] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2392.960304] env[62684]: DEBUG nova.objects.instance [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lazy-loading 'migration_context' on Instance uuid b88d9418-7e90-473e-bd9a-18bc398faad0 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2393.123164] env[62684]: DEBUG oslo_vmware.api [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053860, 'name': Rename_Task, 'duration_secs': 0.170631} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2393.123464] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2393.123833] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-23d55be5-3fb6-4027-864b-553ad060697e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2393.130508] env[62684]: DEBUG oslo_vmware.api [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2393.130508] env[62684]: value = "task-2053861" [ 2393.130508] env[62684]: _type = "Task" [ 2393.130508] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2393.138107] env[62684]: DEBUG oslo_vmware.api [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053861, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2393.424056] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 4cf48f05-d643-47e6-9a0b-33415d80890c] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2393.579357] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4eb2167-efbe-4963-93dc-06e2ce756195 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2393.587274] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fda7a57-2099-43e6-ba69-0fc2e0763046 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2393.618783] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d7cc34-e026-46b7-976a-821acb786c67 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2393.625959] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f641afa9-bb32-4241-9e21-beec713e66f7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2393.641699] env[62684]: DEBUG nova.compute.provider_tree [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2393.647733] env[62684]: DEBUG oslo_vmware.api [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053861, 'name': PowerOnVM_Task, 
'duration_secs': 0.412206} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2393.647994] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2393.648226] env[62684]: INFO nova.compute.manager [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Took 8.45 seconds to spawn the instance on the hypervisor. [ 2393.648429] env[62684]: DEBUG nova.compute.manager [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2393.649203] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9dd912c-0f78-40ca-a668-a1f9de3c4f15 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2393.924969] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2393.925308] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2393.925422] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Deleting the datastore file [datastore2] 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2393.925694] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0fba9d46-ff3f-4b36-8035-ad67d560cac1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2393.927758] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 23578214-6708-43ae-88ce-56212083532a] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2393.935669] env[62684]: DEBUG oslo_vmware.api [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2393.935669] env[62684]: value = "task-2053862" [ 2393.935669] env[62684]: _type = "Task" [ 2393.935669] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2393.943775] env[62684]: DEBUG oslo_vmware.api [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053862, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2394.112040] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "603b2c96-44f1-45a7-8209-b799662a3e42" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2394.112206] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "603b2c96-44f1-45a7-8209-b799662a3e42" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2394.144424] env[62684]: DEBUG nova.scheduler.client.report [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2394.167311] env[62684]: INFO nova.compute.manager [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Took 16.14 seconds to build instance. [ 2394.430905] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 983218ac-7cf3-48ef-88d8-aa9e9322df4b] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2394.446612] env[62684]: DEBUG oslo_vmware.api [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053862, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146685} completed successfully. 
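[editor's note] The PowerOnVM_Task and DeleteDatastoreFile_Task records above follow the same oslo.vmware pattern: invoke a vSphere *_Task method, then poll the returned task until it reaches a terminal state (the file:line tags point at oslo_vmware/api.py wait_for_task/_poll_task). A minimal, self-contained sketch of that polling loop; FakeTask, the state tuples and the poll interval are illustrative assumptions, not the library's API:

    import time

    class FakeTask:
        """Stand-in for a vSphere task; real code reads TaskInfo through the API."""
        def __init__(self, states):
            self._states = iter(states)

        def info(self):
            return next(self._states)

    def wait_for_task(task, poll_interval=0.1):
        # Poll until success or error, mirroring the
        # "progress is 0% ... completed successfully" lines in the log.
        while True:
            state, progress = task.info()
            if state == "success":
                return
            if state == "error":
                raise RuntimeError("task failed")
            print("progress is %d%%" % progress)
            time.sleep(poll_interval)

    wait_for_task(FakeTask([("running", 0), ("running", 60), ("success", 100)]))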
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2394.446951] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2394.447078] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2394.447267] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2394.447890] env[62684]: INFO nova.compute.manager [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Took 4.43 seconds to destroy the instance on the hypervisor. [ 2394.447890] env[62684]: DEBUG oslo.service.loopingcall [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2394.447890] env[62684]: DEBUG nova.compute.manager [-] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2394.448091] env[62684]: DEBUG nova.network.neutron [-] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2394.616496] env[62684]: DEBUG nova.compute.manager [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Starting instance... 
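[editor's note] The "Waiting for function ... _deallocate_network_with_retries to return" record above shows the Neutron deallocation being wrapped in a retry helper from oslo.service's loopingcall module. The sketch below is a generic retry-with-backoff stand-in for that idea, not the oslo.service API; the attempt count, delays and bare-exception handling are assumptions:

    import time

    def call_with_retries(fn, attempts=3, delay=1.0, backoff=2.0):
        """Call fn(), retrying a few times with growing sleeps before giving up."""
        for attempt in range(1, attempts + 1):
            try:
                return fn()
            except Exception:
                if attempt == attempts:
                    raise
                time.sleep(delay)
                delay *= backoff

    # Usage (hypothetical): call_with_retries(lambda: deallocate_network(ctx, instance))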
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2394.669718] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8b218f01-95d8-469a-ac61-be5112a899e8 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "8d22d555-f837-4eb3-9474-c1434649584e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.652s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2394.924069] env[62684]: DEBUG nova.compute.manager [req-ef6cf1da-b09a-4e76-b41a-7b022403c85d req-c60289c9-79a4-4475-80d2-69279af85b7a service nova] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Received event network-vif-deleted-dee84b05-71ef-4a8b-8cfa-64eea8bf277e {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2394.924283] env[62684]: INFO nova.compute.manager [req-ef6cf1da-b09a-4e76-b41a-7b022403c85d req-c60289c9-79a4-4475-80d2-69279af85b7a service nova] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Neutron deleted interface dee84b05-71ef-4a8b-8cfa-64eea8bf277e; detaching it from the instance and deleting it from the info cache [ 2394.924464] env[62684]: DEBUG nova.network.neutron [req-ef6cf1da-b09a-4e76-b41a-7b022403c85d req-c60289c9-79a4-4475-80d2-69279af85b7a service nova] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2394.934765] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 9964237b-db9b-49cc-a9bd-d62329ea564e] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2395.146611] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2395.156423] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.699s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2395.161948] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.015s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2395.163817] env[62684]: INFO nova.compute.claims [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2395.404100] env[62684]: DEBUG nova.network.neutron [-] 
[instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2395.427335] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-357fb6ed-9cf7-483c-b50d-889ba8365ad2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2395.437544] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73d5806a-bf86-4ea7-828a-753495c7207a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2395.448444] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 42ae6edd-e1f5-4ef8-a248-8f02e94d798e] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2395.469123] env[62684]: DEBUG nova.compute.manager [req-ef6cf1da-b09a-4e76-b41a-7b022403c85d req-c60289c9-79a4-4475-80d2-69279af85b7a service nova] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Detach interface failed, port_id=dee84b05-71ef-4a8b-8cfa-64eea8bf277e, reason: Instance 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2395.906909] env[62684]: INFO nova.compute.manager [-] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Took 1.46 seconds to deallocate network for instance. [ 2395.952326] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 81b7949d-be24-46c9-8dc8-c249b65bb039] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2396.316160] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2419ed3-b3a2-4277-9f78-a0cb9f3f2c14 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.324468] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff45cf9f-468e-432c-8bec-6275cc649d81 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.354272] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d29faae-6619-4f4a-94f6-0256538eb552 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.361757] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24a1d39a-1743-4cff-b802-5eb54fee2778 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.376463] env[62684]: DEBUG nova.compute.provider_tree [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2396.413601] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 
tempest-ListServerFiltersTestJSON-503385427-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2396.456695] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 2baabe7a-ed33-4cef-9acc-a7b804610b0a] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2396.559737] env[62684]: DEBUG nova.compute.manager [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Stashing vm_state: active {{(pid=62684) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 2396.698195] env[62684]: INFO nova.compute.manager [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Swapping old allocation on dict_keys(['c23c281e-ec1f-4876-972e-a98655f2084f']) held by migration bab91836-d491-4348-8ab7-41452807a6c5 for instance [ 2396.719611] env[62684]: DEBUG nova.scheduler.client.report [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Overwriting current allocation {'allocations': {'c23c281e-ec1f-4876-972e-a98655f2084f': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 185}}, 'project_id': '263c101fcc5e493789b79dfd1ba97cc0', 'user_id': 'e3a532747bda4c7e8aa2892b424a47ed', 'consumer_generation': 1} on consumer b88d9418-7e90-473e-bd9a-18bc398faad0 {{(pid=62684) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 2396.799534] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "refresh_cache-b88d9418-7e90-473e-bd9a-18bc398faad0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2396.799754] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired lock "refresh_cache-b88d9418-7e90-473e-bd9a-18bc398faad0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2396.799937] env[62684]: DEBUG nova.network.neutron [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2396.879578] env[62684]: DEBUG nova.scheduler.client.report [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2396.958105] env[62684]: DEBUG nova.compute.manager [req-326517e9-7743-46b2-a8a1-e10cdd215d4f req-4e5cefe5-f699-4dd1-9a74-98ecb49aa620 service nova] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Received event network-changed-f5c06971-b96a-4fa0-858e-5e47100e2e68 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2396.958105] env[62684]: DEBUG nova.compute.manager [req-326517e9-7743-46b2-a8a1-e10cdd215d4f req-4e5cefe5-f699-4dd1-9a74-98ecb49aa620 service nova] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Refreshing instance network info cache due to event network-changed-f5c06971-b96a-4fa0-858e-5e47100e2e68. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2396.958105] env[62684]: DEBUG oslo_concurrency.lockutils [req-326517e9-7743-46b2-a8a1-e10cdd215d4f req-4e5cefe5-f699-4dd1-9a74-98ecb49aa620 service nova] Acquiring lock "refresh_cache-0156d807-1ab4-482f-91d1-172bf32bf23c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2396.958105] env[62684]: DEBUG oslo_concurrency.lockutils [req-326517e9-7743-46b2-a8a1-e10cdd215d4f req-4e5cefe5-f699-4dd1-9a74-98ecb49aa620 service nova] Acquired lock "refresh_cache-0156d807-1ab4-482f-91d1-172bf32bf23c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2396.958450] env[62684]: DEBUG nova.network.neutron [req-326517e9-7743-46b2-a8a1-e10cdd215d4f req-4e5cefe5-f699-4dd1-9a74-98ecb49aa620 service nova] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Refreshing network info cache for port f5c06971-b96a-4fa0-858e-5e47100e2e68 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2396.961053] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 57537508-06e7-43a4-95c5-c4399b8bf93f] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2397.076232] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2397.387047] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.223s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2397.387047] env[62684]: DEBUG nova.compute.manager [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Start building networks asynchronously for instance. 
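[editor's note] The inventory payload repeated above is what the scheduler reports for provider c23c281e-ec1f-4876-972e-a98655f2084f. As I read the placement model, usable capacity per resource class is (total - reserved) * allocation_ratio, and no single allocation may exceed max_unit; treat that formula as my assumption, not something stated in the log. A small sketch using the figures from these records:

    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "max_unit": 16, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "max_unit": 65530, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "max_unit": 155, "allocation_ratio": 1.0},
    }

    def usable_capacity(inv):
        # (total - reserved) * allocation_ratio is the pool the scheduler draws from;
        # max_unit caps what any single instance may request.
        return {
            rc: {
                "capacity": int((v["total"] - v["reserved"]) * v["allocation_ratio"]),
                "max_per_instance": v["max_unit"],
            }
            for rc, v in inv.items()
        }

    print(usable_capacity(inventory))
    # e.g. VCPU -> capacity 192 (48 * 4.0), but at most 16 per instance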
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2397.388304] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.975s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2397.389454] env[62684]: DEBUG nova.objects.instance [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lazy-loading 'resources' on Instance uuid 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2397.464163] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: ca3d1a73-6f3b-4278-8fe7-03b66f407ba6] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2397.552019] env[62684]: DEBUG nova.network.neutron [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Updating instance_info_cache with network_info: [{"id": "7e4b9e76-bf05-4ee7-b25c-922484094be0", "address": "fa:16:3e:03:9c:84", "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-120070593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "263c101fcc5e493789b79dfd1ba97cc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e4b9e76-bf", "ovs_interfaceid": "7e4b9e76-bf05-4ee7-b25c-922484094be0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2397.648230] env[62684]: DEBUG nova.network.neutron [req-326517e9-7743-46b2-a8a1-e10cdd215d4f req-4e5cefe5-f699-4dd1-9a74-98ecb49aa620 service nova] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Updated VIF entry in instance network info cache for port f5c06971-b96a-4fa0-858e-5e47100e2e68. 
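[editor's note] The instance_info_cache payload logged above is a list of VIF dicts, with fixed and floating IPs nested several levels down inside each network's subnets. A small helper for pulling the addresses out of such a structure; the trimmed literal reuses values from the b88d9418 cache entry above, and the field layout is copied from that record:

    network_info = [{
        "id": "7e4b9e76-bf05-4ee7-b25c-922484094be0",
        "address": "fa:16:3e:03:9c:84",
        "network": {"subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.6",
                     "floating_ips": [{"address": "10.180.180.232"}]}],
        }]},
    }]

    def addresses(nw_info):
        """Yield (mac, fixed_ip, [floating_ips]) for each VIF in a network_info list."""
        for vif in nw_info:
            for subnet in vif["network"]["subnets"]:
                for ip in subnet["ips"]:
                    yield (vif["address"], ip["address"],
                           [f["address"] for f in ip.get("floating_ips", [])])

    print(list(addresses(network_info)))
    # [('fa:16:3e:03:9c:84', '192.168.128.6', ['10.180.180.232'])]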
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2397.648610] env[62684]: DEBUG nova.network.neutron [req-326517e9-7743-46b2-a8a1-e10cdd215d4f req-4e5cefe5-f699-4dd1-9a74-98ecb49aa620 service nova] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Updating instance_info_cache with network_info: [{"id": "f5c06971-b96a-4fa0-858e-5e47100e2e68", "address": "fa:16:3e:9e:fc:9d", "network": {"id": "e177c6d0-ddd5-4029-94af-c8f1b937dd9f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1344612161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27d04006afc747e19ad87238bfdbaad1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5c06971-b9", "ovs_interfaceid": "f5c06971-b96a-4fa0-858e-5e47100e2e68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2397.891340] env[62684]: DEBUG nova.compute.utils [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2397.896631] env[62684]: DEBUG nova.compute.manager [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2397.896631] env[62684]: DEBUG nova.network.neutron [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2397.941680] env[62684]: DEBUG nova.policy [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '58ea1db87d2b44408282a8b82d799443', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '947e7359aaba456fa1763f4dc8e9d359', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2397.966977] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: b1f70e39-bf37-4fb8-b95b-653b59bec265] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2398.013983] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f84e12b9-3beb-49cd-bc0b-6c335a5d5809 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2398.023450] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f52ec3e6-97e5-4d92-97ed-913eb1ef3c99 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2398.052322] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc09c3f3-de13-4925-a657-f363061431a8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2398.054900] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Releasing lock "refresh_cache-b88d9418-7e90-473e-bd9a-18bc398faad0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2398.055342] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2398.055849] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ce00171d-2b62-4791-8295-8e342c741ddc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2398.063528] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c4affca-0655-458d-999a-89e4deec8e19 {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2398.067161] env[62684]: DEBUG oslo_vmware.api [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2398.067161] env[62684]: value = "task-2053863" [ 2398.067161] env[62684]: _type = "Task" [ 2398.067161] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2398.078535] env[62684]: DEBUG nova.compute.provider_tree [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2398.084795] env[62684]: DEBUG oslo_vmware.api [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053863, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2398.151889] env[62684]: DEBUG oslo_concurrency.lockutils [req-326517e9-7743-46b2-a8a1-e10cdd215d4f req-4e5cefe5-f699-4dd1-9a74-98ecb49aa620 service nova] Releasing lock "refresh_cache-0156d807-1ab4-482f-91d1-172bf32bf23c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2398.151889] env[62684]: DEBUG nova.compute.manager [req-326517e9-7743-46b2-a8a1-e10cdd215d4f req-4e5cefe5-f699-4dd1-9a74-98ecb49aa620 service nova] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Received event network-changed-83b1b299-d863-45f1-9a11-2ffa0e2bd291 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2398.152072] env[62684]: DEBUG nova.compute.manager [req-326517e9-7743-46b2-a8a1-e10cdd215d4f req-4e5cefe5-f699-4dd1-9a74-98ecb49aa620 service nova] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Refreshing instance network info cache due to event network-changed-83b1b299-d863-45f1-9a11-2ffa0e2bd291. 
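[editor's note] The Acquiring/Acquired/Releasing lines for locks named "refresh_cache-<uuid>" above show how concurrent cache refreshes for the same instance are serialised: every reader and writer takes a per-instance named lock through oslo.concurrency. A sketch of that pattern, assuming the lockutils.lock() context manager behaves as its documentation describes; the refresh callback is a placeholder:

    from oslo_concurrency import lockutils

    def refresh_instance_cache(instance_uuid, fetch_nw_info):
        # Serialise on a per-instance lock so an event-driven refresh and a
        # periodic refresh cannot interleave for the same instance.
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            return fetch_nw_info(instance_uuid)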
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2398.152270] env[62684]: DEBUG oslo_concurrency.lockutils [req-326517e9-7743-46b2-a8a1-e10cdd215d4f req-4e5cefe5-f699-4dd1-9a74-98ecb49aa620 service nova] Acquiring lock "refresh_cache-8d22d555-f837-4eb3-9474-c1434649584e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2398.152444] env[62684]: DEBUG oslo_concurrency.lockutils [req-326517e9-7743-46b2-a8a1-e10cdd215d4f req-4e5cefe5-f699-4dd1-9a74-98ecb49aa620 service nova] Acquired lock "refresh_cache-8d22d555-f837-4eb3-9474-c1434649584e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2398.152629] env[62684]: DEBUG nova.network.neutron [req-326517e9-7743-46b2-a8a1-e10cdd215d4f req-4e5cefe5-f699-4dd1-9a74-98ecb49aa620 service nova] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Refreshing network info cache for port 83b1b299-d863-45f1-9a11-2ffa0e2bd291 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2398.208143] env[62684]: DEBUG nova.network.neutron [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Successfully created port: cc3f2e6e-2b58-4d5c-a8a3-5c8f5d710d31 {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2398.399081] env[62684]: DEBUG nova.compute.manager [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2398.470013] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: ffce9a72-e214-4bcc-bd9a-92abdb4a5ef1] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2398.577292] env[62684]: DEBUG oslo_vmware.api [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053863, 'name': PowerOffVM_Task, 'duration_secs': 0.197594} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2398.577561] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2398.578394] env[62684]: DEBUG nova.virt.hardware [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2398.578643] env[62684]: DEBUG nova.virt.hardware [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2398.578810] env[62684]: DEBUG nova.virt.hardware [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2398.578997] env[62684]: DEBUG nova.virt.hardware [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2398.579221] env[62684]: DEBUG nova.virt.hardware [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2398.579390] env[62684]: DEBUG nova.virt.hardware [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2398.579603] env[62684]: DEBUG nova.virt.hardware [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2398.579768] env[62684]: DEBUG nova.virt.hardware [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 
tempest-ServerActionsTestJSON-275262807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2398.579940] env[62684]: DEBUG nova.virt.hardware [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2398.580123] env[62684]: DEBUG nova.virt.hardware [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2398.580307] env[62684]: DEBUG nova.virt.hardware [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2398.585817] env[62684]: DEBUG nova.scheduler.client.report [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2398.589559] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-79cd8708-50cb-4dec-874c-5edeea504974 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2398.605905] env[62684]: DEBUG oslo_vmware.api [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2398.605905] env[62684]: value = "task-2053864" [ 2398.605905] env[62684]: _type = "Task" [ 2398.605905] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2398.616376] env[62684]: DEBUG oslo_vmware.api [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053864, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2398.968605] env[62684]: DEBUG nova.network.neutron [req-326517e9-7743-46b2-a8a1-e10cdd215d4f req-4e5cefe5-f699-4dd1-9a74-98ecb49aa620 service nova] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Updated VIF entry in instance network info cache for port 83b1b299-d863-45f1-9a11-2ffa0e2bd291. 
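[editor's note] The "Build topologies for 1 vcpu(s) 1:1:1 ... Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" block above is the driver enumerating every sockets*cores*threads factorisation of the vCPU count that fits the flavor/image limits, then sorting by preference. A simplified stand-in for that enumeration, not Nova's actual implementation; the 65536 defaults mirror the "limits were sockets=65536, cores=65536, threads=65536" line:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Every (sockets, cores, threads) triple whose product equals vcpus."""
        topos = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if threads <= max_threads:
                    topos.append((sockets, cores, threads))
        return topos

    print(possible_topologies(1))   # [(1, 1, 1)], matching the log above
    print(possible_topologies(4))   # e.g. (1, 1, 4), (1, 2, 2), (2, 2, 1), ...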
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2398.968998] env[62684]: DEBUG nova.network.neutron [req-326517e9-7743-46b2-a8a1-e10cdd215d4f req-4e5cefe5-f699-4dd1-9a74-98ecb49aa620 service nova] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Updating instance_info_cache with network_info: [{"id": "83b1b299-d863-45f1-9a11-2ffa0e2bd291", "address": "fa:16:3e:4f:09:7f", "network": {"id": "e177c6d0-ddd5-4029-94af-c8f1b937dd9f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1344612161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27d04006afc747e19ad87238bfdbaad1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83b1b299-d8", "ovs_interfaceid": "83b1b299-d863-45f1-9a11-2ffa0e2bd291", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2398.973202] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 025dfe36-1f14-4bda-84a0-d424364b745b] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2399.090220] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.702s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2399.092527] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.016s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2399.111803] env[62684]: INFO nova.scheduler.client.report [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Deleted allocations for instance 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d [ 2399.119030] env[62684]: DEBUG oslo_vmware.api [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053864, 'name': ReconfigVM_Task, 'duration_secs': 0.134232} completed successfully. 
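[editor's note] The lockutils lines above record how long each critical section waited for and then held "compute_resources" (waited 2.016s, held 1.702s, and so on), which is the first place to look when resource-tracker operations back up. A small parser written against exactly the 'acquired ... :: waited' and '"released" ... :: held' format shown in this log; adjust the regex if the format differs elsewhere:

    import re

    LOCK_RE = re.compile(
        r'Lock "(?P<name>[^"]+)" (?:acquired by|"released" by) "(?P<owner>[^"]+)"'
        r' :: (?P<kind>waited|held) (?P<secs>[\d.]+)s')

    def lock_timings(lines):
        """Yield (lock_name, owner, 'waited'|'held', seconds) from lockutils log lines."""
        for line in lines:
            m = LOCK_RE.search(line)
            if m:
                yield (m.group("name"), m.group("owner"),
                       m.group("kind"), float(m.group("secs")))

    sample = ('Lock "compute_resources" "released" by '
              '"nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.702s')
    print(list(lock_timings([sample])))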
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2399.120074] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf14a30-8539-4af3-a0f9-14bf994bf95f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2399.139058] env[62684]: DEBUG nova.virt.hardware [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2399.139220] env[62684]: DEBUG nova.virt.hardware [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2399.139267] env[62684]: DEBUG nova.virt.hardware [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2399.139498] env[62684]: DEBUG nova.virt.hardware [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2399.139669] env[62684]: DEBUG nova.virt.hardware [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2399.139829] env[62684]: DEBUG nova.virt.hardware [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2399.140062] env[62684]: DEBUG nova.virt.hardware [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2399.140310] env[62684]: DEBUG nova.virt.hardware [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 2399.140425] env[62684]: DEBUG nova.virt.hardware [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2399.140587] env[62684]: DEBUG nova.virt.hardware [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2399.140868] env[62684]: DEBUG nova.virt.hardware [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2399.142514] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8eedd03-739e-4e1f-93ee-9a06e9d8404a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2399.150541] env[62684]: DEBUG oslo_vmware.api [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2399.150541] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d1b40d-e5a1-0cf4-f800-ece368fff064" [ 2399.150541] env[62684]: _type = "Task" [ 2399.150541] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2399.161871] env[62684]: DEBUG oslo_vmware.api [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d1b40d-e5a1-0cf4-f800-ece368fff064, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2399.408665] env[62684]: DEBUG nova.compute.manager [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2399.434775] env[62684]: DEBUG nova.virt.hardware [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2399.435523] env[62684]: DEBUG nova.virt.hardware [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2399.435523] env[62684]: DEBUG nova.virt.hardware [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2399.435523] env[62684]: DEBUG nova.virt.hardware [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2399.435661] env[62684]: DEBUG nova.virt.hardware [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2399.436021] env[62684]: DEBUG nova.virt.hardware [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2399.436104] env[62684]: DEBUG nova.virt.hardware [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2399.436210] env[62684]: DEBUG nova.virt.hardware [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2399.436387] env[62684]: DEBUG 
nova.virt.hardware [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2399.436556] env[62684]: DEBUG nova.virt.hardware [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2399.436734] env[62684]: DEBUG nova.virt.hardware [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2399.437837] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a087b986-5ad0-4877-ac56-35eb2a7a23aa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2399.446793] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-715ca985-7141-4bf8-b1b6-a0a389f13f88 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2399.471967] env[62684]: DEBUG oslo_concurrency.lockutils [req-326517e9-7743-46b2-a8a1-e10cdd215d4f req-4e5cefe5-f699-4dd1-9a74-98ecb49aa620 service nova] Releasing lock "refresh_cache-8d22d555-f837-4eb3-9474-c1434649584e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2399.475685] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2399.596738] env[62684]: INFO nova.compute.claims [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2399.602621] env[62684]: DEBUG nova.compute.manager [req-db5da03a-a75a-44df-b698-2f030a8f55be req-bd19bc94-1c9b-42fa-a8bf-0cd7d062428d service nova] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Received event network-vif-plugged-cc3f2e6e-2b58-4d5c-a8a3-5c8f5d710d31 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2399.602846] env[62684]: DEBUG oslo_concurrency.lockutils [req-db5da03a-a75a-44df-b698-2f030a8f55be req-bd19bc94-1c9b-42fa-a8bf-0cd7d062428d service nova] Acquiring lock "603b2c96-44f1-45a7-8209-b799662a3e42-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2399.603067] env[62684]: DEBUG oslo_concurrency.lockutils [req-db5da03a-a75a-44df-b698-2f030a8f55be req-bd19bc94-1c9b-42fa-a8bf-0cd7d062428d service nova] Lock "603b2c96-44f1-45a7-8209-b799662a3e42-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 
0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2399.603241] env[62684]: DEBUG oslo_concurrency.lockutils [req-db5da03a-a75a-44df-b698-2f030a8f55be req-bd19bc94-1c9b-42fa-a8bf-0cd7d062428d service nova] Lock "603b2c96-44f1-45a7-8209-b799662a3e42-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2399.603415] env[62684]: DEBUG nova.compute.manager [req-db5da03a-a75a-44df-b698-2f030a8f55be req-bd19bc94-1c9b-42fa-a8bf-0cd7d062428d service nova] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] No waiting events found dispatching network-vif-plugged-cc3f2e6e-2b58-4d5c-a8a3-5c8f5d710d31 {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2399.603677] env[62684]: WARNING nova.compute.manager [req-db5da03a-a75a-44df-b698-2f030a8f55be req-bd19bc94-1c9b-42fa-a8bf-0cd7d062428d service nova] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Received unexpected event network-vif-plugged-cc3f2e6e-2b58-4d5c-a8a3-5c8f5d710d31 for instance with vm_state building and task_state spawning. [ 2399.622428] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8658d719-6a62-4b8f-a7a2-4e963c11b5b1 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.609s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2399.660665] env[62684]: DEBUG oslo_vmware.api [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52d1b40d-e5a1-0cf4-f800-ece368fff064, 'name': SearchDatastore_Task, 'duration_secs': 0.011028} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2399.665905] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Reconfiguring VM instance instance-00000070 to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2399.666192] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63e1e79e-a1c0-44a9-ac22-ee801a78f08e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2399.684969] env[62684]: DEBUG oslo_vmware.api [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2399.684969] env[62684]: value = "task-2053865" [ 2399.684969] env[62684]: _type = "Task" [ 2399.684969] env[62684]: } to complete. 
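[editor's note] The "603b2c96-...-events" lock, the "No waiting events found dispatching network-vif-plugged-..." line and the WARNING about an unexpected event above show the handshake between the spawning thread and Neutron's callback: the spawn path registers the events it expects, the external-event handler pops and signals the matching waiter, and a warning is logged when nobody registered for the event. A self-contained sketch of that pop-or-warn pattern; threading.Event is an illustrative stand-in for Nova's own event machinery:

    import threading

    class InstanceEvents:
        """Map (instance_uuid, event_name) to a waiter, guarded by a lock."""
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}   # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            with self._lock:
                ev = threading.Event()
                self._waiters[(instance_uuid, event_name)] = ev
                return ev

        def pop_event(self, instance_uuid, event_name):
            with self._lock:
                waiter = self._waiters.pop((instance_uuid, event_name), None)
            if waiter is None:
                print("WARNING: received unexpected event %s for %s"
                      % (event_name, instance_uuid))
            else:
                waiter.set()

    events = InstanceEvents()
    events.pop_event("603b2c96", "network-vif-plugged")   # nobody waiting -> warning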
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2399.689960] env[62684]: DEBUG nova.network.neutron [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Successfully updated port: cc3f2e6e-2b58-4d5c-a8a3-5c8f5d710d31 {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2399.693767] env[62684]: DEBUG oslo_vmware.api [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053865, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2399.941984] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquiring lock "d540b43f-5bf9-47df-b319-97a1bae7ffc0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2399.942302] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "d540b43f-5bf9-47df-b319-97a1bae7ffc0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2399.942568] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquiring lock "d540b43f-5bf9-47df-b319-97a1bae7ffc0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2399.942765] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "d540b43f-5bf9-47df-b319-97a1bae7ffc0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2399.942941] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "d540b43f-5bf9-47df-b319-97a1bae7ffc0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2399.944988] env[62684]: INFO nova.compute.manager [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Terminating instance [ 2399.946683] env[62684]: DEBUG nova.compute.manager [None 
req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2399.946889] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2399.947723] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf40203-c9d6-472c-84b8-b3d590cea63b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2399.955237] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2399.955469] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5982aef2-6da2-4b7b-b3e3-8903ce51a042 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2399.962135] env[62684]: DEBUG oslo_vmware.api [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2399.962135] env[62684]: value = "task-2053866" [ 2399.962135] env[62684]: _type = "Task" [ 2399.962135] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2399.969765] env[62684]: DEBUG oslo_vmware.api [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053866, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2400.103280] env[62684]: INFO nova.compute.resource_tracker [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Updating resource usage from migration 6a9e0a6e-8ad9-4b93-855d-75cb80c13130 [ 2400.198322] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "refresh_cache-603b2c96-44f1-45a7-8209-b799662a3e42" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2400.198628] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquired lock "refresh_cache-603b2c96-44f1-45a7-8209-b799662a3e42" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2400.198842] env[62684]: DEBUG nova.network.neutron [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2400.200562] env[62684]: DEBUG oslo_vmware.api [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053865, 'name': ReconfigVM_Task, 'duration_secs': 0.183679} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2400.203802] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Reconfigured VM instance instance-00000070 to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2400.205304] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de9d744f-da26-4966-a9f1-7cc4a5d51299 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2400.230518] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] b88d9418-7e90-473e-bd9a-18bc398faad0/b88d9418-7e90-473e-bd9a-18bc398faad0.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2400.232100] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23f2f116-c251-4d40-a7e8-78c00d0235c0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2400.246324] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b341c7a0-6103-4dd3-8dfe-cde8e6b22384 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2400.254322] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a34e1f1-89a6-41fa-b6f8-95b3a19f86df {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2400.258655] env[62684]: DEBUG oslo_vmware.api [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2400.258655] env[62684]: value = "task-2053867" [ 2400.258655] env[62684]: _type = "Task" [ 2400.258655] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2400.287868] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144504f1-4150-4dfe-b750-68f07d653baa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2400.293035] env[62684]: DEBUG oslo_vmware.api [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053867, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2400.297784] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13b83852-0a36-4136-9d80-26562b894916 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2400.310539] env[62684]: DEBUG nova.compute.provider_tree [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2400.471751] env[62684]: DEBUG oslo_vmware.api [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053866, 'name': PowerOffVM_Task, 'duration_secs': 0.194726} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2400.472017] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2400.472196] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2400.472475] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8bc90e41-8186-403f-bf50-6755c6f46bf2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2400.734621] env[62684]: DEBUG nova.network.neutron [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2400.742093] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2400.742333] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2400.742528] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Deleting the datastore file [datastore2] d540b43f-5bf9-47df-b319-97a1bae7ffc0 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2400.742796] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6aa78d9e-0327-47cd-b230-de8f3fb07696 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2400.749392] env[62684]: DEBUG oslo_vmware.api [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2400.749392] env[62684]: value = "task-2053869" [ 2400.749392] env[62684]: _type = "Task" [ 2400.749392] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2400.759152] env[62684]: DEBUG oslo_vmware.api [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053869, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2400.766814] env[62684]: DEBUG oslo_vmware.api [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053867, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2400.813882] env[62684]: DEBUG nova.scheduler.client.report [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2400.977654] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2400.978086] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2401.046548] env[62684]: DEBUG nova.network.neutron [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Updating instance_info_cache with network_info: [{"id": "cc3f2e6e-2b58-4d5c-a8a3-5c8f5d710d31", "address": "fa:16:3e:06:e7:c3", "network": {"id": "8136a664-f757-43b3-a2fa-bacdf2e9566c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1799567463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947e7359aaba456fa1763f4dc8e9d359", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cc30a16-f070-421c-964e-50c9aa32f17a", "external-id": "nsx-vlan-transportzone-424", "segmentation_id": 424, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc3f2e6e-2b", "ovs_interfaceid": "cc3f2e6e-2b58-4d5c-a8a3-5c8f5d710d31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2401.261650] env[62684]: DEBUG oslo_vmware.api [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053869, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142968} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2401.264912] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2401.265130] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2401.265339] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2401.265655] env[62684]: INFO nova.compute.manager [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Took 1.32 seconds to destroy the instance on the hypervisor. [ 2401.265991] env[62684]: DEBUG oslo.service.loopingcall [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2401.266243] env[62684]: DEBUG nova.compute.manager [-] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2401.266344] env[62684]: DEBUG nova.network.neutron [-] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2401.274398] env[62684]: DEBUG oslo_vmware.api [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053867, 'name': ReconfigVM_Task, 'duration_secs': 0.787625} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2401.274698] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Reconfigured VM instance instance-00000070 to attach disk [datastore1] b88d9418-7e90-473e-bd9a-18bc398faad0/b88d9418-7e90-473e-bd9a-18bc398faad0.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2401.275564] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd429a2c-222a-4eed-a424-0e6056bdc28d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2401.293663] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb7dbbc0-e41f-4c4c-8cd9-fc2290edeee9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2401.311499] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-442dfd54-f87a-40b3-8a28-f27a52f47476 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2401.329735] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.237s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2401.329964] env[62684]: INFO nova.compute.manager [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Migrating [ 2401.336961] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ca6730e-bdc4-4996-a31e-42c39b2c4407 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2401.353344] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2401.353608] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e44e655b-7431-46ed-bae3-d916ab19ae9a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2401.360068] env[62684]: DEBUG oslo_vmware.api [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2401.360068] env[62684]: value = "task-2053870" [ 2401.360068] env[62684]: _type = "Task" [ 2401.360068] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2401.368977] env[62684]: DEBUG oslo_vmware.api [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053870, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2401.484445] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2401.484631] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2401.549524] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Releasing lock "refresh_cache-603b2c96-44f1-45a7-8209-b799662a3e42" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2401.549850] env[62684]: DEBUG nova.compute.manager [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Instance network_info: |[{"id": "cc3f2e6e-2b58-4d5c-a8a3-5c8f5d710d31", "address": "fa:16:3e:06:e7:c3", "network": {"id": "8136a664-f757-43b3-a2fa-bacdf2e9566c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1799567463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947e7359aaba456fa1763f4dc8e9d359", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cc30a16-f070-421c-964e-50c9aa32f17a", "external-id": "nsx-vlan-transportzone-424", "segmentation_id": 424, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc3f2e6e-2b", "ovs_interfaceid": "cc3f2e6e-2b58-4d5c-a8a3-5c8f5d710d31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2401.550305] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:e7:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cc30a16-f070-421c-964e-50c9aa32f17a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cc3f2e6e-2b58-4d5c-a8a3-5c8f5d710d31', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2401.557785] env[62684]: DEBUG oslo.service.loopingcall [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2401.558278] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2401.558521] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-469a59e2-35fa-408d-97ca-c04c5888a6a7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2401.578258] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2401.578258] env[62684]: value = "task-2053871" [ 2401.578258] env[62684]: _type = "Task" [ 2401.578258] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2401.586495] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053871, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2401.633957] env[62684]: DEBUG nova.compute.manager [req-377a1944-a3c6-497b-9c4c-993012268b79 req-b18309cf-e9f2-480e-9fea-6aa26241a513 service nova] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Received event network-changed-cc3f2e6e-2b58-4d5c-a8a3-5c8f5d710d31 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2401.634260] env[62684]: DEBUG nova.compute.manager [req-377a1944-a3c6-497b-9c4c-993012268b79 req-b18309cf-e9f2-480e-9fea-6aa26241a513 service nova] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Refreshing instance network info cache due to event network-changed-cc3f2e6e-2b58-4d5c-a8a3-5c8f5d710d31. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2401.634546] env[62684]: DEBUG oslo_concurrency.lockutils [req-377a1944-a3c6-497b-9c4c-993012268b79 req-b18309cf-e9f2-480e-9fea-6aa26241a513 service nova] Acquiring lock "refresh_cache-603b2c96-44f1-45a7-8209-b799662a3e42" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2401.634746] env[62684]: DEBUG oslo_concurrency.lockutils [req-377a1944-a3c6-497b-9c4c-993012268b79 req-b18309cf-e9f2-480e-9fea-6aa26241a513 service nova] Acquired lock "refresh_cache-603b2c96-44f1-45a7-8209-b799662a3e42" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2401.634929] env[62684]: DEBUG nova.network.neutron [req-377a1944-a3c6-497b-9c4c-993012268b79 req-b18309cf-e9f2-480e-9fea-6aa26241a513 service nova] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Refreshing network info cache for port cc3f2e6e-2b58-4d5c-a8a3-5c8f5d710d31 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2401.851658] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "refresh_cache-8d22d555-f837-4eb3-9474-c1434649584e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2401.851658] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquired lock "refresh_cache-8d22d555-f837-4eb3-9474-c1434649584e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2401.851658] env[62684]: DEBUG nova.network.neutron [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2401.871059] env[62684]: DEBUG oslo_vmware.api [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053870, 'name': PowerOnVM_Task, 'duration_secs': 0.364784} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2401.871350] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2402.034224] env[62684]: DEBUG nova.network.neutron [-] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2402.087459] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053871, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2402.346746] env[62684]: DEBUG nova.network.neutron [req-377a1944-a3c6-497b-9c4c-993012268b79 req-b18309cf-e9f2-480e-9fea-6aa26241a513 service nova] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Updated VIF entry in instance network info cache for port cc3f2e6e-2b58-4d5c-a8a3-5c8f5d710d31. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2402.347226] env[62684]: DEBUG nova.network.neutron [req-377a1944-a3c6-497b-9c4c-993012268b79 req-b18309cf-e9f2-480e-9fea-6aa26241a513 service nova] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Updating instance_info_cache with network_info: [{"id": "cc3f2e6e-2b58-4d5c-a8a3-5c8f5d710d31", "address": "fa:16:3e:06:e7:c3", "network": {"id": "8136a664-f757-43b3-a2fa-bacdf2e9566c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1799567463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947e7359aaba456fa1763f4dc8e9d359", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cc30a16-f070-421c-964e-50c9aa32f17a", "external-id": "nsx-vlan-transportzone-424", "segmentation_id": 424, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc3f2e6e-2b", "ovs_interfaceid": "cc3f2e6e-2b58-4d5c-a8a3-5c8f5d710d31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2402.537154] env[62684]: INFO nova.compute.manager [-] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Took 1.27 seconds to deallocate network for instance. 
[ 2402.584974] env[62684]: DEBUG nova.network.neutron [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Updating instance_info_cache with network_info: [{"id": "83b1b299-d863-45f1-9a11-2ffa0e2bd291", "address": "fa:16:3e:4f:09:7f", "network": {"id": "e177c6d0-ddd5-4029-94af-c8f1b937dd9f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1344612161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27d04006afc747e19ad87238bfdbaad1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83b1b299-d8", "ovs_interfaceid": "83b1b299-d863-45f1-9a11-2ffa0e2bd291", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2402.589836] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053871, 'name': CreateVM_Task, 'duration_secs': 0.561309} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2402.590225] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2402.591972] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2402.591972] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2402.591972] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2402.591972] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ffdb098-0597-4645-bd17-ef9b71817cd3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.596820] env[62684]: DEBUG oslo_vmware.api [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2402.596820] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5250ea12-fede-a154-32d6-35444dbe16f1" [ 2402.596820] env[62684]: _type = "Task" [ 2402.596820] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2402.605058] env[62684]: DEBUG oslo_vmware.api [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5250ea12-fede-a154-32d6-35444dbe16f1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2402.850085] env[62684]: DEBUG oslo_concurrency.lockutils [req-377a1944-a3c6-497b-9c4c-993012268b79 req-b18309cf-e9f2-480e-9fea-6aa26241a513 service nova] Releasing lock "refresh_cache-603b2c96-44f1-45a7-8209-b799662a3e42" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2402.850786] env[62684]: DEBUG nova.compute.manager [req-377a1944-a3c6-497b-9c4c-993012268b79 req-b18309cf-e9f2-480e-9fea-6aa26241a513 service nova] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Received event network-vif-deleted-664c9b72-448f-42d5-bb01-db8d2006bcb8 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2402.850786] env[62684]: INFO nova.compute.manager [req-377a1944-a3c6-497b-9c4c-993012268b79 req-b18309cf-e9f2-480e-9fea-6aa26241a513 service nova] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Neutron deleted interface 664c9b72-448f-42d5-bb01-db8d2006bcb8; detaching it from the instance and deleting it from the info cache [ 2402.850786] env[62684]: DEBUG nova.network.neutron [req-377a1944-a3c6-497b-9c4c-993012268b79 req-b18309cf-e9f2-480e-9fea-6aa26241a513 service nova] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2402.881136] env[62684]: INFO nova.compute.manager [None req-cfa67499-9f13-4244-bbd6-62a33dc13de8 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Updating instance to original state: 'active' [ 2403.043942] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2403.044259] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2403.044468] env[62684]: DEBUG nova.objects.instance [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lazy-loading 'resources' on Instance uuid d540b43f-5bf9-47df-b319-97a1bae7ffc0 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2403.091422] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Releasing lock "refresh_cache-8d22d555-f837-4eb3-9474-c1434649584e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2403.106450] env[62684]: DEBUG oslo_vmware.api [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': 
session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5250ea12-fede-a154-32d6-35444dbe16f1, 'name': SearchDatastore_Task, 'duration_secs': 0.050404} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2403.107332] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2403.107569] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2403.107805] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2403.107954] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2403.108171] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2403.108668] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fe37e35e-241a-4218-9d97-e0293b089df5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.119429] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2403.119590] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2403.120426] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-811a4156-d4a4-4e30-b09d-75304605935b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.126102] env[62684]: DEBUG oslo_vmware.api [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2403.126102] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5267200f-82ff-60c6-70a2-96d9d1dd26ed" [ 2403.126102] env[62684]: _type = "Task" [ 2403.126102] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2403.133654] env[62684]: DEBUG oslo_vmware.api [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5267200f-82ff-60c6-70a2-96d9d1dd26ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2403.354289] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2b3c8ae7-592e-432f-be54-28389f7c165a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.364215] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d50d4e99-2667-48b3-9fd1-d91af7080cac {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.394267] env[62684]: DEBUG nova.compute.manager [req-377a1944-a3c6-497b-9c4c-993012268b79 req-b18309cf-e9f2-480e-9fea-6aa26241a513 service nova] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Detach interface failed, port_id=664c9b72-448f-42d5-bb01-db8d2006bcb8, reason: Instance d540b43f-5bf9-47df-b319-97a1bae7ffc0 could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2403.638653] env[62684]: DEBUG oslo_vmware.api [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5267200f-82ff-60c6-70a2-96d9d1dd26ed, 'name': SearchDatastore_Task, 'duration_secs': 0.016877} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2403.639511] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cf5aa49-6e68-4632-b7be-a72a80fa1741 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.647456] env[62684]: DEBUG oslo_vmware.api [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2403.647456] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ee1f03-967f-6a8e-577d-61a70371b10a" [ 2403.647456] env[62684]: _type = "Task" [ 2403.647456] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2403.654992] env[62684]: DEBUG oslo_vmware.api [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ee1f03-967f-6a8e-577d-61a70371b10a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2403.656432] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4745c1e2-c632-4b5a-956b-2da4d76be190 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.663166] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd60bb8-d905-4efb-b340-181d9380b34b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.694820] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5228c20-ea2b-4159-b8b0-f6a6b52c896c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.700928] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39f30488-fb58-41e5-91ed-c3f1bdcce997 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.714155] env[62684]: DEBUG nova.compute.provider_tree [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2404.157602] env[62684]: DEBUG oslo_vmware.api [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ee1f03-967f-6a8e-577d-61a70371b10a, 'name': SearchDatastore_Task, 'duration_secs': 0.00968} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2404.157963] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2404.158106] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 603b2c96-44f1-45a7-8209-b799662a3e42/603b2c96-44f1-45a7-8209-b799662a3e42.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2404.158395] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f1de4fd7-8fc7-4131-8f29-f9dcb1f98e07 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.165614] env[62684]: DEBUG oslo_vmware.api [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2404.165614] env[62684]: value = "task-2053872" [ 2404.165614] env[62684]: _type = "Task" [ 2404.165614] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2404.173338] env[62684]: DEBUG oslo_vmware.api [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053872, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2404.217635] env[62684]: DEBUG nova.scheduler.client.report [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2404.473601] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "b88d9418-7e90-473e-bd9a-18bc398faad0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2404.473601] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "b88d9418-7e90-473e-bd9a-18bc398faad0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2404.473837] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "b88d9418-7e90-473e-bd9a-18bc398faad0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2404.474032] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "b88d9418-7e90-473e-bd9a-18bc398faad0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2404.474226] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "b88d9418-7e90-473e-bd9a-18bc398faad0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2404.476658] env[62684]: INFO nova.compute.manager [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Terminating instance [ 2404.478530] env[62684]: DEBUG nova.compute.manager [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 
tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2404.478733] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2404.479655] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52e18784-4abb-49f2-88fc-593eda3015a5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.488184] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2404.488471] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-32fb6564-109f-44d9-a253-1e7a0b66cb7a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.495507] env[62684]: DEBUG oslo_vmware.api [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2404.495507] env[62684]: value = "task-2053873" [ 2404.495507] env[62684]: _type = "Task" [ 2404.495507] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2404.505396] env[62684]: DEBUG oslo_vmware.api [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053873, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2404.605061] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c648829e-11b6-448b-b38a-98f5ac762d66 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.622895] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Updating instance '8d22d555-f837-4eb3-9474-c1434649584e' progress to 0 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2404.676135] env[62684]: DEBUG oslo_vmware.api [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053872, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.430753} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2404.676535] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 603b2c96-44f1-45a7-8209-b799662a3e42/603b2c96-44f1-45a7-8209-b799662a3e42.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2404.676847] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2404.677181] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-30f2eeda-fd15-40b5-9ee3-a3abf69feab0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.684153] env[62684]: DEBUG oslo_vmware.api [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2404.684153] env[62684]: value = "task-2053874" [ 2404.684153] env[62684]: _type = "Task" [ 2404.684153] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2404.692320] env[62684]: DEBUG oslo_vmware.api [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053874, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2404.722928] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.678s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2404.740762] env[62684]: INFO nova.scheduler.client.report [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Deleted allocations for instance d540b43f-5bf9-47df-b319-97a1bae7ffc0 [ 2405.006267] env[62684]: DEBUG oslo_vmware.api [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053873, 'name': PowerOffVM_Task, 'duration_secs': 0.243773} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2405.007022] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2405.007022] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2405.007149] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ff779abf-ab5b-4f4b-9ddf-e7c4fb3a255a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.128449] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2405.128779] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-002658d1-b29d-481f-9fd5-94ab384b9a3d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.136111] env[62684]: DEBUG oslo_vmware.api [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2405.136111] env[62684]: value = "task-2053876" [ 2405.136111] env[62684]: _type = "Task" [ 2405.136111] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2405.144555] env[62684]: DEBUG oslo_vmware.api [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053876, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2405.193915] env[62684]: DEBUG oslo_vmware.api [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053874, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063924} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2405.195028] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2405.195534] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69e1ad93-a3e1-42d3-a310-980753196aad {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.217289] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] 603b2c96-44f1-45a7-8209-b799662a3e42/603b2c96-44f1-45a7-8209-b799662a3e42.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2405.217580] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f70b1b66-b4de-4d6b-8303-7a6ce6aeb366 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.236447] env[62684]: DEBUG oslo_vmware.api [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2405.236447] env[62684]: value = "task-2053877" [ 2405.236447] env[62684]: _type = "Task" [ 2405.236447] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2405.248062] env[62684]: DEBUG oslo_vmware.api [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053877, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2405.248536] env[62684]: DEBUG oslo_concurrency.lockutils [None req-04df2d7e-8dc6-41e1-b247-e68edd208baf tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "d540b43f-5bf9-47df-b319-97a1bae7ffc0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.306s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2405.392698] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2405.393063] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2405.393239] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Deleting the datastore file [datastore1] b88d9418-7e90-473e-bd9a-18bc398faad0 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2405.393531] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1e0a1eb6-7780-43f4-a2c9-4ce6b46beb97 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.401060] env[62684]: DEBUG oslo_vmware.api [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2405.401060] env[62684]: value = "task-2053878" [ 2405.401060] env[62684]: _type = "Task" [ 2405.401060] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2405.408831] env[62684]: DEBUG oslo_vmware.api [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053878, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2405.426439] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquiring lock "ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2405.426732] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2405.427305] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquiring lock "ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2405.427305] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2405.427506] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2405.429491] env[62684]: INFO nova.compute.manager [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Terminating instance [ 2405.431257] env[62684]: DEBUG nova.compute.manager [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2405.431507] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2405.432316] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ec58957-6f14-4c7e-a61b-7af4f1bc52cc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.439821] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2405.439976] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1ce51250-f730-40b3-b9a5-1ebf45215bc8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.446927] env[62684]: DEBUG oslo_vmware.api [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2405.446927] env[62684]: value = "task-2053879" [ 2405.446927] env[62684]: _type = "Task" [ 2405.446927] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2405.453639] env[62684]: DEBUG oslo_vmware.api [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053879, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2405.645575] env[62684]: DEBUG oslo_vmware.api [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053876, 'name': PowerOffVM_Task, 'duration_secs': 0.168241} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2405.645844] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2405.646043] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Updating instance '8d22d555-f837-4eb3-9474-c1434649584e' progress to 17 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2405.746197] env[62684]: DEBUG oslo_vmware.api [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053877, 'name': ReconfigVM_Task, 'duration_secs': 0.289277} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2405.746473] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Reconfigured VM instance instance-00000077 to attach disk [datastore1] 603b2c96-44f1-45a7-8209-b799662a3e42/603b2c96-44f1-45a7-8209-b799662a3e42.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2405.747103] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c4489bc7-8e48-4f9a-b332-e60c5f2d98e8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.753261] env[62684]: DEBUG oslo_vmware.api [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2405.753261] env[62684]: value = "task-2053880" [ 2405.753261] env[62684]: _type = "Task" [ 2405.753261] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2405.761211] env[62684]: DEBUG oslo_vmware.api [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053880, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2405.910767] env[62684]: DEBUG oslo_vmware.api [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053878, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146301} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2405.911036] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2405.911229] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2405.911411] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2405.911666] env[62684]: INFO nova.compute.manager [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Took 1.43 seconds to destroy the instance on the hypervisor. [ 2405.911907] env[62684]: DEBUG oslo.service.loopingcall [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2405.912114] env[62684]: DEBUG nova.compute.manager [-] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2405.912212] env[62684]: DEBUG nova.network.neutron [-] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2405.956583] env[62684]: DEBUG oslo_vmware.api [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053879, 'name': PowerOffVM_Task, 'duration_secs': 0.265323} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2405.956851] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2405.957039] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2405.957597] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-589969f7-a4ea-401e-ae81-b361ce1796ec {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.009785] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Didn't find any instances for network info cache update. {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 2406.010013] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2406.010191] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2406.010340] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2406.010491] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2406.010635] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2406.010778] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2406.010908] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2406.011069] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2406.034736] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2406.034969] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2406.035182] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Deleting the datastore file [datastore2] ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2406.035448] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2ccc1dba-a335-42a7-89ea-86d82284b878 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.042447] env[62684]: DEBUG oslo_vmware.api [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for the task: (returnval){ [ 2406.042447] env[62684]: value = "task-2053882" [ 2406.042447] env[62684]: _type = "Task" [ 2406.042447] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2406.049559] env[62684]: DEBUG oslo_vmware.api [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053882, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2406.153145] env[62684]: DEBUG nova.virt.hardware [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2406.153486] env[62684]: DEBUG nova.virt.hardware [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2406.153711] env[62684]: DEBUG nova.virt.hardware [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2406.153969] env[62684]: DEBUG nova.virt.hardware [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2406.154329] env[62684]: DEBUG nova.virt.hardware [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2406.154636] env[62684]: DEBUG nova.virt.hardware [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2406.154990] env[62684]: DEBUG nova.virt.hardware [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2406.155295] env[62684]: DEBUG nova.virt.hardware [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2406.155550] env[62684]: DEBUG nova.virt.hardware [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Got 1 possible topologies {{(pid=62684) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2406.155843] env[62684]: DEBUG nova.virt.hardware [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2406.156176] env[62684]: DEBUG nova.virt.hardware [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2406.162462] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e8fab35-16b4-44af-80eb-e17def9b6de5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.180116] env[62684]: DEBUG oslo_vmware.api [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2406.180116] env[62684]: value = "task-2053883" [ 2406.180116] env[62684]: _type = "Task" [ 2406.180116] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2406.188252] env[62684]: DEBUG oslo_vmware.api [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053883, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2406.263169] env[62684]: DEBUG oslo_vmware.api [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053880, 'name': Rename_Task, 'duration_secs': 0.129264} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2406.263496] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2406.263709] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-181aa7a1-dcf9-4361-8d9f-85551d9cda6c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.269869] env[62684]: DEBUG oslo_vmware.api [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2406.269869] env[62684]: value = "task-2053884" [ 2406.269869] env[62684]: _type = "Task" [ 2406.269869] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2406.279538] env[62684]: DEBUG oslo_vmware.api [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053884, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2406.369622] env[62684]: DEBUG nova.compute.manager [req-f6997e1a-f936-4a21-aba0-089bc6f83d6a req-08e084e3-fc1f-4891-8297-4feb3772fe05 service nova] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Received event network-vif-deleted-7e4b9e76-bf05-4ee7-b25c-922484094be0 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2406.369947] env[62684]: INFO nova.compute.manager [req-f6997e1a-f936-4a21-aba0-089bc6f83d6a req-08e084e3-fc1f-4891-8297-4feb3772fe05 service nova] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Neutron deleted interface 7e4b9e76-bf05-4ee7-b25c-922484094be0; detaching it from the instance and deleting it from the info cache [ 2406.370056] env[62684]: DEBUG nova.network.neutron [req-f6997e1a-f936-4a21-aba0-089bc6f83d6a req-08e084e3-fc1f-4891-8297-4feb3772fe05 service nova] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2406.514160] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2406.514405] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2406.514568] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2406.514761] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2406.515717] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4083686-8dbb-486f-b695-3a0d95d34671 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.524859] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1e7cac5-7592-4a93-a3cc-8a1d94716aac {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.540037] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6e482ab0-ae52-4f79-b5cb-cb75e3a48dec {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.549168] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ef93ef-2512-4d5e-827a-5411df3eb137 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.555167] env[62684]: DEBUG oslo_vmware.api [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Task: {'id': task-2053882, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136967} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2406.555794] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2406.555983] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2406.556473] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2406.556473] env[62684]: INFO nova.compute.manager [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Took 1.12 seconds to destroy the instance on the hypervisor. [ 2406.556615] env[62684]: DEBUG oslo.service.loopingcall [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2406.556794] env[62684]: DEBUG nova.compute.manager [-] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2406.556891] env[62684]: DEBUG nova.network.neutron [-] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2406.586424] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180560MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2406.586424] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2406.586424] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2406.690299] env[62684]: DEBUG oslo_vmware.api [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053883, 'name': ReconfigVM_Task, 'duration_secs': 0.172794} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2406.690620] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Updating instance '8d22d555-f837-4eb3-9474-c1434649584e' progress to 33 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2406.780289] env[62684]: DEBUG oslo_vmware.api [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053884, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2406.844744] env[62684]: DEBUG nova.network.neutron [-] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2406.874036] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dfd31890-23f4-48e8-8a88-a40e5d33e09f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.882456] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-170440a4-b68f-41fb-bd01-e46c8420ebd7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.909571] env[62684]: DEBUG nova.compute.manager [req-f6997e1a-f936-4a21-aba0-089bc6f83d6a req-08e084e3-fc1f-4891-8297-4feb3772fe05 service nova] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Detach interface failed, port_id=7e4b9e76-bf05-4ee7-b25c-922484094be0, reason: Instance b88d9418-7e90-473e-bd9a-18bc398faad0 could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2407.197619] env[62684]: DEBUG nova.virt.hardware [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2407.197899] env[62684]: DEBUG nova.virt.hardware [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2407.198080] env[62684]: DEBUG nova.virt.hardware [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2407.198275] env[62684]: DEBUG nova.virt.hardware [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2407.198425] env[62684]: DEBUG nova.virt.hardware [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2407.198575] env[62684]: DEBUG nova.virt.hardware [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 
tempest-ServerActionsTestOtherA-43335336-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2407.198781] env[62684]: DEBUG nova.virt.hardware [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2407.198947] env[62684]: DEBUG nova.virt.hardware [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2407.199136] env[62684]: DEBUG nova.virt.hardware [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2407.199305] env[62684]: DEBUG nova.virt.hardware [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2407.199482] env[62684]: DEBUG nova.virt.hardware [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2407.204780] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Reconfiguring VM instance instance-00000076 to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2407.205095] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5700341c-07a7-46d3-807d-f39dbe6d9af6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.225293] env[62684]: DEBUG oslo_vmware.api [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2407.225293] env[62684]: value = "task-2053885" [ 2407.225293] env[62684]: _type = "Task" [ 2407.225293] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2407.234740] env[62684]: DEBUG oslo_vmware.api [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053885, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2407.281102] env[62684]: DEBUG oslo_vmware.api [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053884, 'name': PowerOnVM_Task, 'duration_secs': 0.677968} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2407.281414] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2407.281701] env[62684]: INFO nova.compute.manager [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Took 7.87 seconds to spawn the instance on the hypervisor. [ 2407.281908] env[62684]: DEBUG nova.compute.manager [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2407.282747] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c4326a-75a2-4a76-b0f2-d4d824a62c5b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.323661] env[62684]: DEBUG nova.network.neutron [-] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2407.347332] env[62684]: INFO nova.compute.manager [-] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Took 1.43 seconds to deallocate network for instance. [ 2407.593266] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Applying migration context for instance 8d22d555-f837-4eb3-9474-c1434649584e as it has an incoming, in-progress migration 6a9e0a6e-8ad9-4b93-855d-75cb80c13130. Migration status is migrating {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 2407.594410] env[62684]: INFO nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Updating resource usage from migration 6a9e0a6e-8ad9-4b93-855d-75cb80c13130 [ 2407.622433] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 0156d807-1ab4-482f-91d1-172bf32bf23c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2407.622615] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance b0ddbec0-d578-46df-93fd-9d38c939bd77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2407.622744] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2407.622858] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance b88d9418-7e90-473e-bd9a-18bc398faad0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2407.622970] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 603b2c96-44f1-45a7-8209-b799662a3e42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2407.623099] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Migration 6a9e0a6e-8ad9-4b93-855d-75cb80c13130 is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 2407.623214] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 8d22d555-f837-4eb3-9474-c1434649584e actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2407.623404] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2407.623539] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1920MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2407.712228] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b6b0674-070d-4cec-8549-98ffa6cf9bf0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.719504] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9512ce9-294b-47dd-bba1-57b3bee21624 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.753229] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b78d98-59a6-409d-80b0-85ee3eae6ad5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.763405] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8936ff5-438e-466c-931e-6299d1ddfa8a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.767054] env[62684]: DEBUG oslo_vmware.api [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053885, 'name': ReconfigVM_Task, 'duration_secs': 0.196226} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2407.767328] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Reconfigured VM instance instance-00000076 to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2407.768353] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa7d9403-0c88-48f7-9289-ac7d46afc3cb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.778109] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2407.798291] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] volume-c148a6a9-f313-4a0d-8466-261a9903a3c8/volume-c148a6a9-f313-4a0d-8466-261a9903a3c8.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2407.802869] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb8cdc99-cb67-457b-b563-e6d220452b59 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.818524] env[62684]: INFO nova.compute.manager [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Took 12.69 seconds to build instance. [ 2407.824491] env[62684]: DEBUG oslo_vmware.api [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2407.824491] env[62684]: value = "task-2053886" [ 2407.824491] env[62684]: _type = "Task" [ 2407.824491] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2407.827936] env[62684]: INFO nova.compute.manager [-] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Took 1.27 seconds to deallocate network for instance. [ 2407.835386] env[62684]: DEBUG oslo_vmware.api [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053886, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2407.853594] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2408.283020] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2408.320414] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e511d2e7-d4f6-4407-a8fa-31865aafa2cb tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "603b2c96-44f1-45a7-8209-b799662a3e42" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.208s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2408.335694] env[62684]: DEBUG oslo_vmware.api [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053886, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2408.336873] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2408.394681] env[62684]: DEBUG nova.compute.manager [req-92eef097-bd62-4ebd-ada4-9824cf448ae2 req-603aabda-f49c-485a-ab0a-f29aa1c80e42 service nova] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Received event network-vif-deleted-b5464c95-056f-4730-8974-808275aae0f2 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2408.531744] env[62684]: DEBUG nova.compute.manager [req-01ad1108-46cd-44f2-9ebd-2e386d461f72 req-381a4771-b971-4c60-98e9-f5254f7fc64c service nova] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Received event network-changed-cc3f2e6e-2b58-4d5c-a8a3-5c8f5d710d31 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2408.531949] env[62684]: DEBUG nova.compute.manager [req-01ad1108-46cd-44f2-9ebd-2e386d461f72 req-381a4771-b971-4c60-98e9-f5254f7fc64c service nova] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Refreshing instance network info cache due to event network-changed-cc3f2e6e-2b58-4d5c-a8a3-5c8f5d710d31. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2408.532231] env[62684]: DEBUG oslo_concurrency.lockutils [req-01ad1108-46cd-44f2-9ebd-2e386d461f72 req-381a4771-b971-4c60-98e9-f5254f7fc64c service nova] Acquiring lock "refresh_cache-603b2c96-44f1-45a7-8209-b799662a3e42" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2408.532400] env[62684]: DEBUG oslo_concurrency.lockutils [req-01ad1108-46cd-44f2-9ebd-2e386d461f72 req-381a4771-b971-4c60-98e9-f5254f7fc64c service nova] Acquired lock "refresh_cache-603b2c96-44f1-45a7-8209-b799662a3e42" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2408.532575] env[62684]: DEBUG nova.network.neutron [req-01ad1108-46cd-44f2-9ebd-2e386d461f72 req-381a4771-b971-4c60-98e9-f5254f7fc64c service nova] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Refreshing network info cache for port cc3f2e6e-2b58-4d5c-a8a3-5c8f5d710d31 {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2408.786134] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2408.786360] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.202s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2408.786656] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.933s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2408.786887] env[62684]: DEBUG nova.objects.instance [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lazy-loading 'resources' on Instance uuid b88d9418-7e90-473e-bd9a-18bc398faad0 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2408.787970] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2408.788128] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Cleaning up deleted instances with incomplete migration {{(pid=62684) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 2408.835241] env[62684]: DEBUG oslo_vmware.api [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053886, 'name': ReconfigVM_Task, 'duration_secs': 0.958512} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2408.835525] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Reconfigured VM instance instance-00000076 to attach disk [datastore1] volume-c148a6a9-f313-4a0d-8466-261a9903a3c8/volume-c148a6a9-f313-4a0d-8466-261a9903a3c8.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2408.835795] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Updating instance '8d22d555-f837-4eb3-9474-c1434649584e' progress to 50 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2409.237340] env[62684]: DEBUG nova.network.neutron [req-01ad1108-46cd-44f2-9ebd-2e386d461f72 req-381a4771-b971-4c60-98e9-f5254f7fc64c service nova] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Updated VIF entry in instance network info cache for port cc3f2e6e-2b58-4d5c-a8a3-5c8f5d710d31. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2409.237727] env[62684]: DEBUG nova.network.neutron [req-01ad1108-46cd-44f2-9ebd-2e386d461f72 req-381a4771-b971-4c60-98e9-f5254f7fc64c service nova] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Updating instance_info_cache with network_info: [{"id": "cc3f2e6e-2b58-4d5c-a8a3-5c8f5d710d31", "address": "fa:16:3e:06:e7:c3", "network": {"id": "8136a664-f757-43b3-a2fa-bacdf2e9566c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1799567463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947e7359aaba456fa1763f4dc8e9d359", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cc30a16-f070-421c-964e-50c9aa32f17a", "external-id": "nsx-vlan-transportzone-424", "segmentation_id": 424, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc3f2e6e-2b", "ovs_interfaceid": "cc3f2e6e-2b58-4d5c-a8a3-5c8f5d710d31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2409.343723] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d809510-2d40-4e77-8a03-b26ed0369007 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.366562] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c86e3b4-c9a3-4062-9e35-a4d0b9a4ddeb {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.384553] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Updating instance '8d22d555-f837-4eb3-9474-c1434649584e' progress to 67 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2409.410586] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d5fb8d-fd49-493d-aa94-5483e37f2d11 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.418209] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d5616e2-5dc8-4f6e-b91e-a8d49e78ed4e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.449762] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0452be17-fca2-4777-b64f-3c6577e586d4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.457823] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64ed89a8-a0c7-4f61-ad28-4395077b2d92 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.471183] env[62684]: DEBUG nova.compute.provider_tree [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2409.740300] env[62684]: DEBUG oslo_concurrency.lockutils [req-01ad1108-46cd-44f2-9ebd-2e386d461f72 req-381a4771-b971-4c60-98e9-f5254f7fc64c service nova] Releasing lock "refresh_cache-603b2c96-44f1-45a7-8209-b799662a3e42" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2409.974578] env[62684]: DEBUG nova.scheduler.client.report [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2410.479767] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.693s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2410.482210] env[62684]: DEBUG oslo_concurrency.lockutils [None 
req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.145s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2410.482448] env[62684]: DEBUG nova.objects.instance [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lazy-loading 'resources' on Instance uuid ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2410.498670] env[62684]: INFO nova.scheduler.client.report [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Deleted allocations for instance b88d9418-7e90-473e-bd9a-18bc398faad0 [ 2411.006545] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f5bc3fa3-0ac9-4757-a51c-8fedc349d122 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "b88d9418-7e90-473e-bd9a-18bc398faad0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.533s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2411.034098] env[62684]: DEBUG nova.network.neutron [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Port 83b1b299-d863-45f1-9a11-2ffa0e2bd291 binding to destination host cpu-1 is already ACTIVE {{(pid=62684) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2411.074613] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f775e83-1fc1-4de3-8116-34d1a01d3ae0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.082442] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6deee09-5240-4868-98bf-21c85d82f0fc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.113259] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-747673da-a30e-437d-8793-c1131a975e4f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.120693] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90bbf453-386b-4380-a4cb-cebf8cd05f7b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.133639] env[62684]: DEBUG nova.compute.provider_tree [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2411.637461] env[62684]: DEBUG nova.scheduler.client.report [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 
tempest-ListServerFiltersTestJSON-503385427-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2412.050467] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "8d22d555-f837-4eb3-9474-c1434649584e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2412.050710] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "8d22d555-f837-4eb3-9474-c1434649584e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2412.050890] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "8d22d555-f837-4eb3-9474-c1434649584e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2412.142082] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.660s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2412.158652] env[62684]: INFO nova.scheduler.client.report [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Deleted allocations for instance ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5 [ 2412.519950] env[62684]: DEBUG oslo_concurrency.lockutils [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "4081f322-a854-475a-9a66-3d573128f39d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2412.520212] env[62684]: DEBUG oslo_concurrency.lockutils [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "4081f322-a854-475a-9a66-3d573128f39d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s 
{{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2412.665943] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a21e2ccf-b375-4d71-a07b-9a2de14b5594 tempest-ListServerFiltersTestJSON-503385427 tempest-ListServerFiltersTestJSON-503385427-project-member] Lock "ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.239s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2412.995582] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2412.996052] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Getting list of instances from cluster (obj){ [ 2412.996052] env[62684]: value = "domain-c8" [ 2412.996052] env[62684]: _type = "ClusterComputeResource" [ 2412.996052] env[62684]: } {{(pid=62684) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 2412.997143] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f8fa45-7c5c-40a7-a664-64c51c16a1fb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2413.012406] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Got total of 4 instances {{(pid=62684) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 2413.022573] env[62684]: DEBUG nova.compute.manager [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2413.100408] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "refresh_cache-8d22d555-f837-4eb3-9474-c1434649584e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2413.100617] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquired lock "refresh_cache-8d22d555-f837-4eb3-9474-c1434649584e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2413.100806] env[62684]: DEBUG nova.network.neutron [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2413.545207] env[62684]: DEBUG oslo_concurrency.lockutils [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2413.545490] env[62684]: DEBUG oslo_concurrency.lockutils [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2413.547541] env[62684]: INFO nova.compute.claims [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2413.818729] env[62684]: DEBUG nova.network.neutron [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Updating instance_info_cache with network_info: [{"id": "83b1b299-d863-45f1-9a11-2ffa0e2bd291", "address": "fa:16:3e:4f:09:7f", "network": {"id": "e177c6d0-ddd5-4029-94af-c8f1b937dd9f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1344612161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27d04006afc747e19ad87238bfdbaad1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83b1b299-d8", "ovs_interfaceid": "83b1b299-d863-45f1-9a11-2ffa0e2bd291", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2414.321785] env[62684]: DEBUG oslo_concurrency.lockutils [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Releasing lock "refresh_cache-8d22d555-f837-4eb3-9474-c1434649584e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2414.630011] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3ea26e-d0c0-4f01-a8db-6359bfc6b492 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2414.637937] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebefdf4f-1ce9-48ca-97c8-c726b9666edb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2414.668663] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e122551-91c8-4b9b-a6d5-b84162901b56 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2414.675844] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da531741-8d94-4b3e-ae37-d4e586b90ca2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2414.688796] env[62684]: DEBUG nova.compute.provider_tree [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2414.831144] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf851309-aa0a-4acd-9a56-f6be89abd48b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2414.838444] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-553bba19-6011-4d5d-837e-98ec0b6c43f6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2415.192265] env[62684]: DEBUG nova.scheduler.client.report [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2415.697230] env[62684]: DEBUG oslo_concurrency.lockutils [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.151s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2415.697737] env[62684]: DEBUG nova.compute.manager [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2415.932157] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-526c3355-6634-4b8a-9ba1-cd64428bc505 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2415.950990] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7389cd84-d874-497e-8ecd-31d9e4493b03 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2415.958042] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Updating instance '8d22d555-f837-4eb3-9474-c1434649584e' progress to 83 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2416.203867] env[62684]: DEBUG nova.compute.utils [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2416.205281] env[62684]: DEBUG nova.compute.manager [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2416.205448] env[62684]: DEBUG nova.network.neutron [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2416.256221] env[62684]: DEBUG nova.policy [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e3a532747bda4c7e8aa2892b424a47ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '263c101fcc5e493789b79dfd1ba97cc0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2416.465008] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2416.465355] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9ef40a7b-4043-42e7-9710-8af07a60610a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.472561] env[62684]: DEBUG oslo_vmware.api [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2416.472561] env[62684]: value = "task-2053887" [ 2416.472561] env[62684]: _type = "Task" [ 2416.472561] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2416.481448] env[62684]: DEBUG oslo_vmware.api [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053887, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2416.530131] env[62684]: DEBUG nova.network.neutron [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Successfully created port: c3974c79-c5f4-4ea4-88b0-b0e0a4e63b1f {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2416.709427] env[62684]: DEBUG nova.compute.manager [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2416.982112] env[62684]: DEBUG oslo_vmware.api [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053887, 'name': PowerOnVM_Task, 'duration_secs': 0.376034} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2416.982503] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2416.982503] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-33b5911b-cf40-40db-a256-26db0b5235a9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Updating instance '8d22d555-f837-4eb3-9474-c1434649584e' progress to 100 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2417.719655] env[62684]: DEBUG nova.compute.manager [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2417.744502] env[62684]: DEBUG nova.virt.hardware [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2417.744801] env[62684]: DEBUG nova.virt.hardware [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2417.744966] env[62684]: DEBUG nova.virt.hardware [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2417.745175] env[62684]: DEBUG nova.virt.hardware [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Flavor pref 0:0:0 {{(pid=62684) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2417.745330] env[62684]: DEBUG nova.virt.hardware [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2417.745492] env[62684]: DEBUG nova.virt.hardware [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2417.745981] env[62684]: DEBUG nova.virt.hardware [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2417.745981] env[62684]: DEBUG nova.virt.hardware [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2417.746121] env[62684]: DEBUG nova.virt.hardware [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2417.746243] env[62684]: DEBUG nova.virt.hardware [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2417.746420] env[62684]: DEBUG nova.virt.hardware [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2417.747286] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd2825b-1167-49a3-aefa-e161ccf7b4e4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.755679] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd7ddd7c-1d19-4105-a701-6953e941c8ab {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2418.924882] env[62684]: DEBUG nova.compute.manager [req-0a2ea267-f183-4711-aaa7-46cd6cc6364b req-04f1a4d9-8b9b-4292-9ad1-a814f8895cc2 service nova] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Received event network-vif-plugged-c3974c79-c5f4-4ea4-88b0-b0e0a4e63b1f {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2418.925158] env[62684]: DEBUG oslo_concurrency.lockutils [req-0a2ea267-f183-4711-aaa7-46cd6cc6364b req-04f1a4d9-8b9b-4292-9ad1-a814f8895cc2 service nova] 
Acquiring lock "4081f322-a854-475a-9a66-3d573128f39d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2418.925405] env[62684]: DEBUG oslo_concurrency.lockutils [req-0a2ea267-f183-4711-aaa7-46cd6cc6364b req-04f1a4d9-8b9b-4292-9ad1-a814f8895cc2 service nova] Lock "4081f322-a854-475a-9a66-3d573128f39d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2418.925609] env[62684]: DEBUG oslo_concurrency.lockutils [req-0a2ea267-f183-4711-aaa7-46cd6cc6364b req-04f1a4d9-8b9b-4292-9ad1-a814f8895cc2 service nova] Lock "4081f322-a854-475a-9a66-3d573128f39d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2418.925822] env[62684]: DEBUG nova.compute.manager [req-0a2ea267-f183-4711-aaa7-46cd6cc6364b req-04f1a4d9-8b9b-4292-9ad1-a814f8895cc2 service nova] [instance: 4081f322-a854-475a-9a66-3d573128f39d] No waiting events found dispatching network-vif-plugged-c3974c79-c5f4-4ea4-88b0-b0e0a4e63b1f {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2418.926043] env[62684]: WARNING nova.compute.manager [req-0a2ea267-f183-4711-aaa7-46cd6cc6364b req-04f1a4d9-8b9b-4292-9ad1-a814f8895cc2 service nova] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Received unexpected event network-vif-plugged-c3974c79-c5f4-4ea4-88b0-b0e0a4e63b1f for instance with vm_state building and task_state spawning. 
[ 2419.014335] env[62684]: DEBUG nova.network.neutron [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Successfully updated port: c3974c79-c5f4-4ea4-88b0-b0e0a4e63b1f {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2419.372715] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b18e2fa9-127c-4594-8da8-c4fb3242adc1 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "8d22d555-f837-4eb3-9474-c1434649584e" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2419.372980] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b18e2fa9-127c-4594-8da8-c4fb3242adc1 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "8d22d555-f837-4eb3-9474-c1434649584e" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2419.373203] env[62684]: DEBUG nova.compute.manager [None req-b18e2fa9-127c-4594-8da8-c4fb3242adc1 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Going to confirm migration 8 {{(pid=62684) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 2419.516794] env[62684]: DEBUG oslo_concurrency.lockutils [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "refresh_cache-4081f322-a854-475a-9a66-3d573128f39d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2419.516977] env[62684]: DEBUG oslo_concurrency.lockutils [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired lock "refresh_cache-4081f322-a854-475a-9a66-3d573128f39d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2419.517156] env[62684]: DEBUG nova.network.neutron [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2419.943621] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b18e2fa9-127c-4594-8da8-c4fb3242adc1 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "refresh_cache-8d22d555-f837-4eb3-9474-c1434649584e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2419.943862] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b18e2fa9-127c-4594-8da8-c4fb3242adc1 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquired lock "refresh_cache-8d22d555-f837-4eb3-9474-c1434649584e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2419.944046] env[62684]: DEBUG 
nova.network.neutron [None req-b18e2fa9-127c-4594-8da8-c4fb3242adc1 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2419.944242] env[62684]: DEBUG nova.objects.instance [None req-b18e2fa9-127c-4594-8da8-c4fb3242adc1 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lazy-loading 'info_cache' on Instance uuid 8d22d555-f837-4eb3-9474-c1434649584e {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2420.046224] env[62684]: DEBUG nova.network.neutron [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2420.166176] env[62684]: DEBUG nova.network.neutron [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Updating instance_info_cache with network_info: [{"id": "c3974c79-c5f4-4ea4-88b0-b0e0a4e63b1f", "address": "fa:16:3e:09:e0:b3", "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-120070593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "263c101fcc5e493789b79dfd1ba97cc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3974c79-c5", "ovs_interfaceid": "c3974c79-c5f4-4ea4-88b0-b0e0a4e63b1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2420.668474] env[62684]: DEBUG oslo_concurrency.lockutils [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Releasing lock "refresh_cache-4081f322-a854-475a-9a66-3d573128f39d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2420.668813] env[62684]: DEBUG nova.compute.manager [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Instance network_info: |[{"id": "c3974c79-c5f4-4ea4-88b0-b0e0a4e63b1f", "address": "fa:16:3e:09:e0:b3", "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-120070593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], 
"gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "263c101fcc5e493789b79dfd1ba97cc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3974c79-c5", "ovs_interfaceid": "c3974c79-c5f4-4ea4-88b0-b0e0a4e63b1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2420.669286] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:09:e0:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92e4d027-e755-417b-8eea-9a8f24b85140', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c3974c79-c5f4-4ea4-88b0-b0e0a4e63b1f', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2420.676714] env[62684]: DEBUG oslo.service.loopingcall [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2420.676926] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2420.677167] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-22113c79-b3f7-4ad4-a67f-8e0a0f66a53f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2420.697139] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2420.697139] env[62684]: value = "task-2053888" [ 2420.697139] env[62684]: _type = "Task" [ 2420.697139] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2420.704511] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053888, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2420.957331] env[62684]: DEBUG nova.compute.manager [req-df2cce96-7abf-490f-8642-438f91829307 req-f7a95a6f-c722-442a-9676-3225d90b7430 service nova] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Received event network-changed-c3974c79-c5f4-4ea4-88b0-b0e0a4e63b1f {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2420.957627] env[62684]: DEBUG nova.compute.manager [req-df2cce96-7abf-490f-8642-438f91829307 req-f7a95a6f-c722-442a-9676-3225d90b7430 service nova] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Refreshing instance network info cache due to event network-changed-c3974c79-c5f4-4ea4-88b0-b0e0a4e63b1f. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2420.957849] env[62684]: DEBUG oslo_concurrency.lockutils [req-df2cce96-7abf-490f-8642-438f91829307 req-f7a95a6f-c722-442a-9676-3225d90b7430 service nova] Acquiring lock "refresh_cache-4081f322-a854-475a-9a66-3d573128f39d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2420.958215] env[62684]: DEBUG oslo_concurrency.lockutils [req-df2cce96-7abf-490f-8642-438f91829307 req-f7a95a6f-c722-442a-9676-3225d90b7430 service nova] Acquired lock "refresh_cache-4081f322-a854-475a-9a66-3d573128f39d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2420.958489] env[62684]: DEBUG nova.network.neutron [req-df2cce96-7abf-490f-8642-438f91829307 req-f7a95a6f-c722-442a-9676-3225d90b7430 service nova] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Refreshing network info cache for port c3974c79-c5f4-4ea4-88b0-b0e0a4e63b1f {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2421.166490] env[62684]: DEBUG nova.network.neutron [None req-b18e2fa9-127c-4594-8da8-c4fb3242adc1 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Updating instance_info_cache with network_info: [{"id": "83b1b299-d863-45f1-9a11-2ffa0e2bd291", "address": "fa:16:3e:4f:09:7f", "network": {"id": "e177c6d0-ddd5-4029-94af-c8f1b937dd9f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1344612161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27d04006afc747e19ad87238bfdbaad1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83b1b299-d8", "ovs_interfaceid": "83b1b299-d863-45f1-9a11-2ffa0e2bd291", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2421.206420] env[62684]: DEBUG 
oslo_vmware.api [-] Task: {'id': task-2053888, 'name': CreateVM_Task, 'duration_secs': 0.382747} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2421.206558] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2421.207213] env[62684]: DEBUG oslo_concurrency.lockutils [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2421.207385] env[62684]: DEBUG oslo_concurrency.lockutils [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2421.207706] env[62684]: DEBUG oslo_concurrency.lockutils [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2421.208213] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd6a5a8a-46d8-40ff-9eed-3776bee75e7a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.212470] env[62684]: DEBUG oslo_vmware.api [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2421.212470] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520138d5-be22-a53a-7da7-a9be56aad9e2" [ 2421.212470] env[62684]: _type = "Task" [ 2421.212470] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2421.219946] env[62684]: DEBUG oslo_vmware.api [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520138d5-be22-a53a-7da7-a9be56aad9e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2421.642028] env[62684]: DEBUG nova.network.neutron [req-df2cce96-7abf-490f-8642-438f91829307 req-f7a95a6f-c722-442a-9676-3225d90b7430 service nova] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Updated VIF entry in instance network info cache for port c3974c79-c5f4-4ea4-88b0-b0e0a4e63b1f. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2421.642399] env[62684]: DEBUG nova.network.neutron [req-df2cce96-7abf-490f-8642-438f91829307 req-f7a95a6f-c722-442a-9676-3225d90b7430 service nova] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Updating instance_info_cache with network_info: [{"id": "c3974c79-c5f4-4ea4-88b0-b0e0a4e63b1f", "address": "fa:16:3e:09:e0:b3", "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-120070593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "263c101fcc5e493789b79dfd1ba97cc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3974c79-c5", "ovs_interfaceid": "c3974c79-c5f4-4ea4-88b0-b0e0a4e63b1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2421.669383] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b18e2fa9-127c-4594-8da8-c4fb3242adc1 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Releasing lock "refresh_cache-8d22d555-f837-4eb3-9474-c1434649584e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2421.669674] env[62684]: DEBUG nova.objects.instance [None req-b18e2fa9-127c-4594-8da8-c4fb3242adc1 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lazy-loading 'migration_context' on Instance uuid 8d22d555-f837-4eb3-9474-c1434649584e {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2421.723035] env[62684]: DEBUG oslo_vmware.api [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520138d5-be22-a53a-7da7-a9be56aad9e2, 'name': SearchDatastore_Task, 'duration_secs': 0.011727} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2421.723346] env[62684]: DEBUG oslo_concurrency.lockutils [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2421.723577] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2421.723810] env[62684]: DEBUG oslo_concurrency.lockutils [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2421.723967] env[62684]: DEBUG oslo_concurrency.lockutils [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2421.724162] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2421.724417] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0e4d5bd-0f7c-45d9-ad96-1c7494550645 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.732082] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2421.732260] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2421.732927] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71f9a75a-6a9b-4354-9dcf-7b26f709991a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.737657] env[62684]: DEBUG oslo_vmware.api [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2421.737657] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5230d511-1bca-b16e-7b59-d5e3be9f9382" [ 2421.737657] env[62684]: _type = "Task" [ 2421.737657] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2421.745743] env[62684]: DEBUG oslo_vmware.api [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5230d511-1bca-b16e-7b59-d5e3be9f9382, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2422.145466] env[62684]: DEBUG oslo_concurrency.lockutils [req-df2cce96-7abf-490f-8642-438f91829307 req-f7a95a6f-c722-442a-9676-3225d90b7430 service nova] Releasing lock "refresh_cache-4081f322-a854-475a-9a66-3d573128f39d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2422.172680] env[62684]: DEBUG nova.objects.base [None req-b18e2fa9-127c-4594-8da8-c4fb3242adc1 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Object Instance<8d22d555-f837-4eb3-9474-c1434649584e> lazy-loaded attributes: info_cache,migration_context {{(pid=62684) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2422.173673] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77770c9d-14b6-498f-b678-f144f3677ccc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.193066] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9189a565-a129-420a-ad03-8ca846fa71bc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.198031] env[62684]: DEBUG oslo_vmware.api [None req-b18e2fa9-127c-4594-8da8-c4fb3242adc1 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2422.198031] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5227c86e-8e24-eb91-9b90-5d5919dfb748" [ 2422.198031] env[62684]: _type = "Task" [ 2422.198031] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2422.205268] env[62684]: DEBUG oslo_vmware.api [None req-b18e2fa9-127c-4594-8da8-c4fb3242adc1 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5227c86e-8e24-eb91-9b90-5d5919dfb748, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2422.247501] env[62684]: DEBUG oslo_vmware.api [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5230d511-1bca-b16e-7b59-d5e3be9f9382, 'name': SearchDatastore_Task, 'duration_secs': 0.00878} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2422.248279] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e059aa8c-eb54-47c8-99ae-441931945a56 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.253508] env[62684]: DEBUG oslo_vmware.api [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2422.253508] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521029de-d9c3-9354-4ab0-5ae9daca11fc" [ 2422.253508] env[62684]: _type = "Task" [ 2422.253508] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2422.260517] env[62684]: DEBUG oslo_vmware.api [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521029de-d9c3-9354-4ab0-5ae9daca11fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2422.709019] env[62684]: DEBUG oslo_vmware.api [None req-b18e2fa9-127c-4594-8da8-c4fb3242adc1 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5227c86e-8e24-eb91-9b90-5d5919dfb748, 'name': SearchDatastore_Task, 'duration_secs': 0.006885} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2422.709357] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b18e2fa9-127c-4594-8da8-c4fb3242adc1 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2422.709596] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b18e2fa9-127c-4594-8da8-c4fb3242adc1 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2422.763528] env[62684]: DEBUG oslo_vmware.api [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521029de-d9c3-9354-4ab0-5ae9daca11fc, 'name': SearchDatastore_Task, 'duration_secs': 0.00896} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2422.763764] env[62684]: DEBUG oslo_concurrency.lockutils [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2422.764057] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 4081f322-a854-475a-9a66-3d573128f39d/4081f322-a854-475a-9a66-3d573128f39d.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2422.764332] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b9fb65ef-27d1-4494-bb2d-600e790c0dd5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.770762] env[62684]: DEBUG oslo_vmware.api [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2422.770762] env[62684]: value = "task-2053889" [ 2422.770762] env[62684]: _type = "Task" [ 2422.770762] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2422.777909] env[62684]: DEBUG oslo_vmware.api [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053889, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2423.283895] env[62684]: DEBUG oslo_vmware.api [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053889, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.442913} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2423.283895] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 4081f322-a854-475a-9a66-3d573128f39d/4081f322-a854-475a-9a66-3d573128f39d.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2423.283895] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2423.283895] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c08b52d7-12ca-447c-b297-d58360dde5f9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2423.289272] env[62684]: DEBUG oslo_vmware.api [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2423.289272] env[62684]: value = "task-2053890" [ 2423.289272] env[62684]: _type = "Task" [ 2423.289272] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2423.296956] env[62684]: DEBUG oslo_vmware.api [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053890, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2423.298502] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96e8ccb1-5612-4b23-b656-dbe34f394a53 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2423.305279] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b241971-dc4d-4584-a18d-5edea3b88739 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2423.335310] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f0ca67-9d0f-41bb-a3ea-058d37172b55 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2423.342885] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62311496-d6cf-4c64-9c3c-47866cc3a891 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2423.356476] env[62684]: DEBUG nova.compute.provider_tree [None req-b18e2fa9-127c-4594-8da8-c4fb3242adc1 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2423.799953] env[62684]: DEBUG oslo_vmware.api [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053890, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.055969} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2423.800313] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2423.801234] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd3a6117-14e9-4e84-b8c7-9b2b264dc803 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2423.824985] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] 4081f322-a854-475a-9a66-3d573128f39d/4081f322-a854-475a-9a66-3d573128f39d.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2423.825270] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e367c5c-df95-4bfd-a3bf-6946054a5280 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2423.844754] env[62684]: DEBUG oslo_vmware.api [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2423.844754] env[62684]: value = "task-2053891" [ 2423.844754] env[62684]: _type = "Task" [ 2423.844754] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2423.852666] env[62684]: DEBUG oslo_vmware.api [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053891, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2423.859667] env[62684]: DEBUG nova.scheduler.client.report [None req-b18e2fa9-127c-4594-8da8-c4fb3242adc1 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2424.354562] env[62684]: DEBUG oslo_vmware.api [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053891, 'name': ReconfigVM_Task, 'duration_secs': 0.258922} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2424.355032] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Reconfigured VM instance instance-00000078 to attach disk [datastore1] 4081f322-a854-475a-9a66-3d573128f39d/4081f322-a854-475a-9a66-3d573128f39d.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2424.355444] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-90a9db08-6280-49a2-b6e0-c17d435e52fa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2424.361946] env[62684]: DEBUG oslo_vmware.api [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2424.361946] env[62684]: value = "task-2053892" [ 2424.361946] env[62684]: _type = "Task" [ 2424.361946] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2424.372599] env[62684]: DEBUG oslo_vmware.api [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053892, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2424.873188] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b18e2fa9-127c-4594-8da8-c4fb3242adc1 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.163s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2424.875819] env[62684]: DEBUG oslo_vmware.api [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053892, 'name': Rename_Task, 'duration_secs': 0.13853} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2424.876277] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2424.876516] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d1bcfd4b-53b4-47ae-96b0-6a040d3cba87 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2424.882111] env[62684]: DEBUG oslo_vmware.api [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2424.882111] env[62684]: value = "task-2053893" [ 2424.882111] env[62684]: _type = "Task" [ 2424.882111] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2424.890072] env[62684]: DEBUG oslo_vmware.api [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053893, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2425.391974] env[62684]: DEBUG oslo_vmware.api [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053893, 'name': PowerOnVM_Task, 'duration_secs': 0.408397} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2425.394559] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2425.394782] env[62684]: INFO nova.compute.manager [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Took 7.68 seconds to spawn the instance on the hypervisor. [ 2425.395015] env[62684]: DEBUG nova.compute.manager [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2425.396291] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9120d491-4049-4bd3-8140-c59e83d779e6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2425.429728] env[62684]: INFO nova.scheduler.client.report [None req-b18e2fa9-127c-4594-8da8-c4fb3242adc1 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Deleted allocation for migration 6a9e0a6e-8ad9-4b93-855d-75cb80c13130 [ 2425.684558] env[62684]: INFO nova.compute.manager [None req-f143d734-ff2d-4eb7-9dce-3eadf6e4ad21 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Get console output [ 2425.684794] env[62684]: WARNING nova.virt.vmwareapi.driver [None req-f143d734-ff2d-4eb7-9dce-3eadf6e4ad21 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] The console log is missing. Check your VSPC configuration [ 2425.911960] env[62684]: INFO nova.compute.manager [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Took 12.38 seconds to build instance. 
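Annotation (not part of the captured log): the recurring lock bookkeeping in these entries ("Acquiring lock ... by ...", "acquired ... waited 0.000s", "released ... held N.NNNs") is emitted by oslo.concurrency's lockutils helpers, which Nova wraps, rather than by the compute manager itself. As a rough, hedged sketch only — the lock names and functions below are placeholders, not Nova's actual code paths — this is the pattern that produces such lines:

```python
# Illustrative sketch of the oslo.concurrency locking pattern whose DEBUG
# output ("Acquiring lock ... / acquired ... / released ...") appears
# throughout this log. Names below are hypothetical placeholders.
from oslo_concurrency import lockutils

# Decorator form: serializes callers on a named semaphore and logs how long
# each caller waited for, and then held, the lock.
@lockutils.synchronized("compute_resources")
def update_resource_usage():
    # critical section: only one caller at a time runs this
    pass

# Context-manager form, comparable to the per-instance
# "refresh_cache-<uuid>" style locks seen above (UUID is a placeholder).
def refresh_network_cache():
    with lockutils.lock("refresh_cache-00000000-0000-0000-0000-000000000000"):
        # rebuild the instance network info cache while holding the lock
        pass
```

Treat this as an approximation of the pattern only; the exact call sites are the ones named in the {{...}} suffixes of the entries above.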
[ 2425.934684] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b18e2fa9-127c-4594-8da8-c4fb3242adc1 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "8d22d555-f837-4eb3-9474-c1434649584e" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.562s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2426.414123] env[62684]: DEBUG oslo_concurrency.lockutils [None req-137ca35c-379a-4de5-bb0c-edc4dccc1759 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "4081f322-a854-475a-9a66-3d573128f39d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.894s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2426.568938] env[62684]: DEBUG nova.compute.manager [req-d1c402ab-ce01-4bca-9e09-510e4cf0df90 req-8652eb04-7296-4b6a-a10a-227e9f5bed74 service nova] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Received event network-changed-c3974c79-c5f4-4ea4-88b0-b0e0a4e63b1f {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2426.569169] env[62684]: DEBUG nova.compute.manager [req-d1c402ab-ce01-4bca-9e09-510e4cf0df90 req-8652eb04-7296-4b6a-a10a-227e9f5bed74 service nova] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Refreshing instance network info cache due to event network-changed-c3974c79-c5f4-4ea4-88b0-b0e0a4e63b1f. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2426.569411] env[62684]: DEBUG oslo_concurrency.lockutils [req-d1c402ab-ce01-4bca-9e09-510e4cf0df90 req-8652eb04-7296-4b6a-a10a-227e9f5bed74 service nova] Acquiring lock "refresh_cache-4081f322-a854-475a-9a66-3d573128f39d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2426.569578] env[62684]: DEBUG oslo_concurrency.lockutils [req-d1c402ab-ce01-4bca-9e09-510e4cf0df90 req-8652eb04-7296-4b6a-a10a-227e9f5bed74 service nova] Acquired lock "refresh_cache-4081f322-a854-475a-9a66-3d573128f39d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2426.569748] env[62684]: DEBUG nova.network.neutron [req-d1c402ab-ce01-4bca-9e09-510e4cf0df90 req-8652eb04-7296-4b6a-a10a-227e9f5bed74 service nova] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Refreshing network info cache for port c3974c79-c5f4-4ea4-88b0-b0e0a4e63b1f {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2427.278963] env[62684]: DEBUG nova.network.neutron [req-d1c402ab-ce01-4bca-9e09-510e4cf0df90 req-8652eb04-7296-4b6a-a10a-227e9f5bed74 service nova] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Updated VIF entry in instance network info cache for port c3974c79-c5f4-4ea4-88b0-b0e0a4e63b1f. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2427.279350] env[62684]: DEBUG nova.network.neutron [req-d1c402ab-ce01-4bca-9e09-510e4cf0df90 req-8652eb04-7296-4b6a-a10a-227e9f5bed74 service nova] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Updating instance_info_cache with network_info: [{"id": "c3974c79-c5f4-4ea4-88b0-b0e0a4e63b1f", "address": "fa:16:3e:09:e0:b3", "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-120070593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "263c101fcc5e493789b79dfd1ba97cc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3974c79-c5", "ovs_interfaceid": "c3974c79-c5f4-4ea4-88b0-b0e0a4e63b1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2427.782231] env[62684]: DEBUG oslo_concurrency.lockutils [req-d1c402ab-ce01-4bca-9e09-510e4cf0df90 req-8652eb04-7296-4b6a-a10a-227e9f5bed74 service nova] Releasing lock "refresh_cache-4081f322-a854-475a-9a66-3d573128f39d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2439.821040] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2440.325027] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2440.325027] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2440.325027] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2440.325027] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2440.325703] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25389b8f-ce77-4dd1-bd85-9579a16728c3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2440.335323] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45471c01-cb9e-4d1d-ad84-a532987eee1a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2440.350010] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40499644-ca15-4c28-b8e3-a5321e37d0e2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2440.356470] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-605bc8be-2e02-4bed-b980-decaa1c3d138 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2440.384926] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180721MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2440.385068] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2440.385254] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2441.549500] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 0156d807-1ab4-482f-91d1-172bf32bf23c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2441.549817] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance b0ddbec0-d578-46df-93fd-9d38c939bd77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2441.549817] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 603b2c96-44f1-45a7-8209-b799662a3e42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2441.549954] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 8d22d555-f837-4eb3-9474-c1434649584e actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2441.550085] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 4081f322-a854-475a-9a66-3d573128f39d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2441.550275] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2441.550410] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2441.614064] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e6461ce-44f2-4e6f-a49d-ce5ccb761bba {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2441.621412] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc21811f-b78b-4b0e-9677-88e10ab2e03a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2441.651395] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0551526d-cb16-4039-a688-489298603379 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2441.658461] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ffc8643-c0ab-4cb3-8457-b998494ddb6c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2441.671083] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2442.174687] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2442.679755] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2442.680175] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.295s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2442.837547] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2442.837798] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2442.837992] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the list of instances to heal {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2443.369602] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "refresh_cache-0156d807-1ab4-482f-91d1-172bf32bf23c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2443.369753] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "refresh_cache-0156d807-1ab4-482f-91d1-172bf32bf23c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2443.369901] env[62684]: DEBUG nova.network.neutron [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Forcefully refreshing network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2443.370068] env[62684]: DEBUG nova.objects.instance [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lazy-loading 'info_cache' on Instance uuid 0156d807-1ab4-482f-91d1-172bf32bf23c {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2445.074263] env[62684]: DEBUG nova.network.neutron [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Updating instance_info_cache with network_info: [{"id": "f5c06971-b96a-4fa0-858e-5e47100e2e68", "address": "fa:16:3e:9e:fc:9d", "network": {"id": "e177c6d0-ddd5-4029-94af-c8f1b937dd9f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1344612161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27d04006afc747e19ad87238bfdbaad1", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5c06971-b9", "ovs_interfaceid": "f5c06971-b96a-4fa0-858e-5e47100e2e68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2445.577920] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "refresh_cache-0156d807-1ab4-482f-91d1-172bf32bf23c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2445.578188] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Updated the network info_cache for instance {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2445.578406] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2445.578564] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2446.083064] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Getting list of instances from cluster (obj){ [ 2446.083064] env[62684]: value = "domain-c8" [ 2446.083064] env[62684]: _type = "ClusterComputeResource" [ 2446.083064] env[62684]: } {{(pid=62684) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 2446.083740] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a88081a5-5b46-4d98-a364-331aa5226d8f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2446.098903] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Got total of 5 instances {{(pid=62684) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 2446.099078] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid 0156d807-1ab4-482f-91d1-172bf32bf23c {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 2446.099265] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid b0ddbec0-d578-46df-93fd-9d38c939bd77 {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 2446.099421] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid 8d22d555-f837-4eb3-9474-c1434649584e {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 2446.099572] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid 
603b2c96-44f1-45a7-8209-b799662a3e42 {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 2446.099719] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Triggering sync for uuid 4081f322-a854-475a-9a66-3d573128f39d {{(pid=62684) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 2446.100051] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "0156d807-1ab4-482f-91d1-172bf32bf23c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2446.100277] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "0156d807-1ab4-482f-91d1-172bf32bf23c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2446.100531] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "b0ddbec0-d578-46df-93fd-9d38c939bd77" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2446.100716] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "b0ddbec0-d578-46df-93fd-9d38c939bd77" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2446.100943] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "8d22d555-f837-4eb3-9474-c1434649584e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2446.101144] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "8d22d555-f837-4eb3-9474-c1434649584e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2446.101375] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "603b2c96-44f1-45a7-8209-b799662a3e42" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2446.101557] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "603b2c96-44f1-45a7-8209-b799662a3e42" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2446.101785] env[62684]: DEBUG oslo_concurrency.lockutils [None 
req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "4081f322-a854-475a-9a66-3d573128f39d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2446.101996] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "4081f322-a854-475a-9a66-3d573128f39d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2446.102211] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2446.102342] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2446.103011] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae0b572-e90d-4a8f-ad1b-68edc3eea026 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2446.105862] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a55fc0c-a44c-4d66-831f-5edbb2a26858 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2446.108430] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1733101-7928-4234-a847-f4fa3a073f1d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2446.111163] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca61dac-383b-41be-bd88-4911f21a36d1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2446.113728] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5e75226-812b-46d3-bbd0-cd25f91112ae {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2446.630721] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "603b2c96-44f1-45a7-8209-b799662a3e42" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.529s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2446.631121] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "b0ddbec0-d578-46df-93fd-9d38c939bd77" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.530s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2446.631637] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock 
"0156d807-1ab4-482f-91d1-172bf32bf23c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.531s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2446.631998] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "4081f322-a854-475a-9a66-3d573128f39d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.530s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2446.632320] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "8d22d555-f837-4eb3-9474-c1434649584e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.531s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2446.686330] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8827340a-3a0e-4117-9ae4-4cd31905a63d tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "603b2c96-44f1-45a7-8209-b799662a3e42" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2446.686573] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8827340a-3a0e-4117-9ae4-4cd31905a63d tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "603b2c96-44f1-45a7-8209-b799662a3e42" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2446.837526] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2447.189370] env[62684]: DEBUG nova.compute.utils [None req-8827340a-3a0e-4117-9ae4-4cd31905a63d tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2447.300024] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2447.692496] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8827340a-3a0e-4117-9ae4-4cd31905a63d tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "603b2c96-44f1-45a7-8209-b799662a3e42" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2448.295752] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2448.300308] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2448.750459] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8827340a-3a0e-4117-9ae4-4cd31905a63d tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "603b2c96-44f1-45a7-8209-b799662a3e42" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2448.750730] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8827340a-3a0e-4117-9ae4-4cd31905a63d tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "603b2c96-44f1-45a7-8209-b799662a3e42" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2448.750969] env[62684]: INFO nova.compute.manager [None req-8827340a-3a0e-4117-9ae4-4cd31905a63d tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Attaching volume f41aa5ac-b1a6-4fe2-aeb2-078d58c348a5 to /dev/sdb [ 2448.780835] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14133f17-e55e-46a5-9ea5-d72fc82a2db3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2448.788189] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa1bde94-4886-4ac8-9d22-5815781a0e86 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2448.802455] env[62684]: DEBUG nova.virt.block_device [None req-8827340a-3a0e-4117-9ae4-4cd31905a63d tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Updating existing volume attachment record: c3b0c782-a058-47fc-b3f0-1993f228ebe5 {{(pid=62684) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2449.300640] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2452.571373] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "8d22d555-f837-4eb3-9474-c1434649584e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2452.571726] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "8d22d555-f837-4eb3-9474-c1434649584e" 
acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2452.571882] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "8d22d555-f837-4eb3-9474-c1434649584e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2452.572126] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "8d22d555-f837-4eb3-9474-c1434649584e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2452.572313] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "8d22d555-f837-4eb3-9474-c1434649584e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2452.576301] env[62684]: INFO nova.compute.manager [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Terminating instance [ 2452.578281] env[62684]: DEBUG nova.compute.manager [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2452.578500] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2452.578746] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-461644e0-f4ee-4ee2-8113-1eea5daed82f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2452.585876] env[62684]: DEBUG oslo_vmware.api [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2452.585876] env[62684]: value = "task-2053896" [ 2452.585876] env[62684]: _type = "Task" [ 2452.585876] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2452.594716] env[62684]: DEBUG oslo_vmware.api [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053896, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2453.095756] env[62684]: DEBUG oslo_vmware.api [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053896, 'name': PowerOffVM_Task, 'duration_secs': 0.234228} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2453.096024] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2453.096238] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Volume detach. Driver type: vmdk {{(pid=62684) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2453.096444] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421420', 'volume_id': 'c148a6a9-f313-4a0d-8466-261a9903a3c8', 'name': 'volume-c148a6a9-f313-4a0d-8466-261a9903a3c8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '8d22d555-f837-4eb3-9474-c1434649584e', 'attached_at': '2025-01-10T07:57:35.000000', 'detached_at': '', 'volume_id': 'c148a6a9-f313-4a0d-8466-261a9903a3c8', 'serial': 'c148a6a9-f313-4a0d-8466-261a9903a3c8'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2453.097205] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66762668-caab-4b56-ba71-9b5c5eded36e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.115222] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b41e94f3-8e3c-4d73-93e9-9b89bc54abe7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.121254] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d83c48-a284-44ec-97ba-7fd79ad93d31 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.137698] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ca852cd-36a4-4f15-8c7d-4c0f42752ee2 {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.151462] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] The volume has not been displaced from its original location: [datastore1] volume-c148a6a9-f313-4a0d-8466-261a9903a3c8/volume-c148a6a9-f313-4a0d-8466-261a9903a3c8.vmdk. No consolidation needed. {{(pid=62684) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2453.156579] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Reconfiguring VM instance instance-00000076 to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2453.156828] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e563d2a-03c9-4b0b-94ca-dce9be8084da {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.174745] env[62684]: DEBUG oslo_vmware.api [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2453.174745] env[62684]: value = "task-2053897" [ 2453.174745] env[62684]: _type = "Task" [ 2453.174745] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2453.182291] env[62684]: DEBUG oslo_vmware.api [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053897, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2453.345298] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-8827340a-3a0e-4117-9ae4-4cd31905a63d tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Volume attach. 
Driver type: vmdk {{(pid=62684) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2453.345553] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-8827340a-3a0e-4117-9ae4-4cd31905a63d tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421429', 'volume_id': 'f41aa5ac-b1a6-4fe2-aeb2-078d58c348a5', 'name': 'volume-f41aa5ac-b1a6-4fe2-aeb2-078d58c348a5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '603b2c96-44f1-45a7-8209-b799662a3e42', 'attached_at': '', 'detached_at': '', 'volume_id': 'f41aa5ac-b1a6-4fe2-aeb2-078d58c348a5', 'serial': 'f41aa5ac-b1a6-4fe2-aeb2-078d58c348a5'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2453.346456] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a97c7e-7a97-43c5-82ee-9fe0818cfba7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.363285] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35879bbf-80b7-432d-b263-645ed3ab8061 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.386761] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-8827340a-3a0e-4117-9ae4-4cd31905a63d tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Reconfiguring VM instance instance-00000077 to attach disk [datastore2] volume-f41aa5ac-b1a6-4fe2-aeb2-078d58c348a5/volume-f41aa5ac-b1a6-4fe2-aeb2-078d58c348a5.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2453.386992] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8bc6d6b-4cb2-4493-89bd-3de08cf17382 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.404527] env[62684]: DEBUG oslo_vmware.api [None req-8827340a-3a0e-4117-9ae4-4cd31905a63d tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2453.404527] env[62684]: value = "task-2053898" [ 2453.404527] env[62684]: _type = "Task" [ 2453.404527] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2453.412069] env[62684]: DEBUG oslo_vmware.api [None req-8827340a-3a0e-4117-9ae4-4cd31905a63d tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053898, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2453.684403] env[62684]: DEBUG oslo_vmware.api [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053897, 'name': ReconfigVM_Task, 'duration_secs': 0.160798} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2453.684888] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Reconfigured VM instance instance-00000076 to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2453.689236] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-08595901-4c17-4147-8db0-4c8700ebc796 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.705015] env[62684]: DEBUG oslo_vmware.api [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2453.705015] env[62684]: value = "task-2053899" [ 2453.705015] env[62684]: _type = "Task" [ 2453.705015] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2453.712963] env[62684]: DEBUG oslo_vmware.api [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053899, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2453.914594] env[62684]: DEBUG oslo_vmware.api [None req-8827340a-3a0e-4117-9ae4-4cd31905a63d tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053898, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2454.215026] env[62684]: DEBUG oslo_vmware.api [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053899, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2454.416211] env[62684]: DEBUG oslo_vmware.api [None req-8827340a-3a0e-4117-9ae4-4cd31905a63d tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053898, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2454.714900] env[62684]: DEBUG oslo_vmware.api [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053899, 'name': ReconfigVM_Task, 'duration_secs': 0.686107} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2454.715308] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421420', 'volume_id': 'c148a6a9-f313-4a0d-8466-261a9903a3c8', 'name': 'volume-c148a6a9-f313-4a0d-8466-261a9903a3c8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '8d22d555-f837-4eb3-9474-c1434649584e', 'attached_at': '2025-01-10T07:57:35.000000', 'detached_at': '', 'volume_id': 'c148a6a9-f313-4a0d-8466-261a9903a3c8', 'serial': 'c148a6a9-f313-4a0d-8466-261a9903a3c8'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2454.715554] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2454.716277] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7825eeb9-7771-4a0d-9980-ef7d024908a6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2454.722834] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2454.723077] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3ffb112c-f9f9-4cbd-96cc-bbbc89aa1ce9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2454.916311] env[62684]: DEBUG oslo_vmware.api [None req-8827340a-3a0e-4117-9ae4-4cd31905a63d tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053898, 'name': ReconfigVM_Task, 'duration_secs': 1.263501} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2454.916614] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-8827340a-3a0e-4117-9ae4-4cd31905a63d tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Reconfigured VM instance instance-00000077 to attach disk [datastore2] volume-f41aa5ac-b1a6-4fe2-aeb2-078d58c348a5/volume-f41aa5ac-b1a6-4fe2-aeb2-078d58c348a5.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2454.921178] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-14dcad5a-b511-4220-aef4-204904de8f9f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2454.936413] env[62684]: DEBUG oslo_vmware.api [None req-8827340a-3a0e-4117-9ae4-4cd31905a63d tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2454.936413] env[62684]: value = "task-2053901" [ 2454.936413] env[62684]: _type = "Task" [ 2454.936413] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2454.944291] env[62684]: DEBUG oslo_vmware.api [None req-8827340a-3a0e-4117-9ae4-4cd31905a63d tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053901, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2455.446465] env[62684]: DEBUG oslo_vmware.api [None req-8827340a-3a0e-4117-9ae4-4cd31905a63d tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053901, 'name': ReconfigVM_Task, 'duration_secs': 0.135565} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2455.447476] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-8827340a-3a0e-4117-9ae4-4cd31905a63d tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421429', 'volume_id': 'f41aa5ac-b1a6-4fe2-aeb2-078d58c348a5', 'name': 'volume-f41aa5ac-b1a6-4fe2-aeb2-078d58c348a5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '603b2c96-44f1-45a7-8209-b799662a3e42', 'attached_at': '', 'detached_at': '', 'volume_id': 'f41aa5ac-b1a6-4fe2-aeb2-078d58c348a5', 'serial': 'f41aa5ac-b1a6-4fe2-aeb2-078d58c348a5'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2456.482767] env[62684]: DEBUG nova.objects.instance [None req-8827340a-3a0e-4117-9ae4-4cd31905a63d tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lazy-loading 'flavor' on Instance uuid 603b2c96-44f1-45a7-8209-b799662a3e42 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2456.989606] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8827340a-3a0e-4117-9ae4-4cd31905a63d tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "603b2c96-44f1-45a7-8209-b799662a3e42" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.239s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2457.217391] env[62684]: DEBUG oslo_concurrency.lockutils [None req-94e4bfcf-f937-484b-b83b-eb0c323eb5de tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "603b2c96-44f1-45a7-8209-b799662a3e42" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2457.217727] env[62684]: DEBUG oslo_concurrency.lockutils [None req-94e4bfcf-f937-484b-b83b-eb0c323eb5de tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "603b2c96-44f1-45a7-8209-b799662a3e42" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2457.721375] env[62684]: INFO nova.compute.manager [None req-94e4bfcf-f937-484b-b83b-eb0c323eb5de tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Detaching volume f41aa5ac-b1a6-4fe2-aeb2-078d58c348a5 [ 2457.749469] env[62684]: INFO nova.virt.block_device [None req-94e4bfcf-f937-484b-b83b-eb0c323eb5de tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Attempting to driver detach volume f41aa5ac-b1a6-4fe2-aeb2-078d58c348a5 from mountpoint /dev/sdb [ 2457.749719] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-94e4bfcf-f937-484b-b83b-eb0c323eb5de tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] 
[instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Volume detach. Driver type: vmdk {{(pid=62684) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2457.749914] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-94e4bfcf-f937-484b-b83b-eb0c323eb5de tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421429', 'volume_id': 'f41aa5ac-b1a6-4fe2-aeb2-078d58c348a5', 'name': 'volume-f41aa5ac-b1a6-4fe2-aeb2-078d58c348a5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '603b2c96-44f1-45a7-8209-b799662a3e42', 'attached_at': '', 'detached_at': '', 'volume_id': 'f41aa5ac-b1a6-4fe2-aeb2-078d58c348a5', 'serial': 'f41aa5ac-b1a6-4fe2-aeb2-078d58c348a5'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2457.750832] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2103cc0-50cc-4caf-8f71-1d570327c97c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2457.773210] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90e89739-45f4-48a8-908c-8cb28730d14e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2457.780090] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b31c526-068c-48c5-873b-d6a7c9362c15 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2457.799568] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b075a5-a49f-428b-87fd-d949b710fc0b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2457.814011] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-94e4bfcf-f937-484b-b83b-eb0c323eb5de tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] The volume has not been displaced from its original location: [datastore2] volume-f41aa5ac-b1a6-4fe2-aeb2-078d58c348a5/volume-f41aa5ac-b1a6-4fe2-aeb2-078d58c348a5.vmdk. No consolidation needed. 
{{(pid=62684) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2457.819101] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-94e4bfcf-f937-484b-b83b-eb0c323eb5de tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Reconfiguring VM instance instance-00000077 to detach disk 2001 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2457.819377] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63deeaac-9330-46d6-9855-733764110486 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2457.836452] env[62684]: DEBUG oslo_vmware.api [None req-94e4bfcf-f937-484b-b83b-eb0c323eb5de tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2457.836452] env[62684]: value = "task-2053902" [ 2457.836452] env[62684]: _type = "Task" [ 2457.836452] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2457.845171] env[62684]: DEBUG oslo_vmware.api [None req-94e4bfcf-f937-484b-b83b-eb0c323eb5de tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053902, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2458.346570] env[62684]: DEBUG oslo_vmware.api [None req-94e4bfcf-f937-484b-b83b-eb0c323eb5de tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053902, 'name': ReconfigVM_Task, 'duration_secs': 0.212601} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2458.346834] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-94e4bfcf-f937-484b-b83b-eb0c323eb5de tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Reconfigured VM instance instance-00000077 to detach disk 2001 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2458.351264] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5bdd0af7-f6b2-4f2c-93f5-c190cac74212 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2458.365991] env[62684]: DEBUG oslo_vmware.api [None req-94e4bfcf-f937-484b-b83b-eb0c323eb5de tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2458.365991] env[62684]: value = "task-2053903" [ 2458.365991] env[62684]: _type = "Task" [ 2458.365991] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2458.374167] env[62684]: DEBUG oslo_vmware.api [None req-94e4bfcf-f937-484b-b83b-eb0c323eb5de tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053903, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2458.634177] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2458.634410] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2458.634566] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Deleting the datastore file [datastore1] 8d22d555-f837-4eb3-9474-c1434649584e {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2458.634849] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e3788bc3-f615-469a-b01e-fcabd6d5311a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2458.641055] env[62684]: DEBUG oslo_vmware.api [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2458.641055] env[62684]: value = "task-2053904" [ 2458.641055] env[62684]: _type = "Task" [ 2458.641055] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2458.648452] env[62684]: DEBUG oslo_vmware.api [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053904, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2458.875804] env[62684]: DEBUG oslo_vmware.api [None req-94e4bfcf-f937-484b-b83b-eb0c323eb5de tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053903, 'name': ReconfigVM_Task, 'duration_secs': 0.146713} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2458.876182] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-94e4bfcf-f937-484b-b83b-eb0c323eb5de tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421429', 'volume_id': 'f41aa5ac-b1a6-4fe2-aeb2-078d58c348a5', 'name': 'volume-f41aa5ac-b1a6-4fe2-aeb2-078d58c348a5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '603b2c96-44f1-45a7-8209-b799662a3e42', 'attached_at': '', 'detached_at': '', 'volume_id': 'f41aa5ac-b1a6-4fe2-aeb2-078d58c348a5', 'serial': 'f41aa5ac-b1a6-4fe2-aeb2-078d58c348a5'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2459.151840] env[62684]: DEBUG oslo_vmware.api [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053904, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.084294} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2459.152077] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2459.152291] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2459.152485] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2459.152672] env[62684]: INFO nova.compute.manager [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Took 6.57 seconds to destroy the instance on the hypervisor. [ 2459.152923] env[62684]: DEBUG oslo.service.loopingcall [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2459.153144] env[62684]: DEBUG nova.compute.manager [-] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2459.153240] env[62684]: DEBUG nova.network.neutron [-] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2459.418785] env[62684]: DEBUG nova.objects.instance [None req-94e4bfcf-f937-484b-b83b-eb0c323eb5de tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lazy-loading 'flavor' on Instance uuid 603b2c96-44f1-45a7-8209-b799662a3e42 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2459.601233] env[62684]: DEBUG nova.compute.manager [req-7aa4720c-e3d0-4013-bef4-ca25cba4541f req-ab57ed0b-179c-49c7-bc92-97bce1503127 service nova] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Received event network-vif-deleted-83b1b299-d863-45f1-9a11-2ffa0e2bd291 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2459.601421] env[62684]: INFO nova.compute.manager [req-7aa4720c-e3d0-4013-bef4-ca25cba4541f req-ab57ed0b-179c-49c7-bc92-97bce1503127 service nova] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Neutron deleted interface 83b1b299-d863-45f1-9a11-2ffa0e2bd291; detaching it from the instance and deleting it from the info cache [ 2459.601605] env[62684]: DEBUG nova.network.neutron [req-7aa4720c-e3d0-4013-bef4-ca25cba4541f req-ab57ed0b-179c-49c7-bc92-97bce1503127 service nova] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2460.078038] env[62684]: DEBUG nova.network.neutron [-] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2460.103889] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b9518473-5b10-4d08-8e11-744334c1db43 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2460.114300] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c6f0ed-4135-4e6c-abed-858f443f718b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2460.140876] env[62684]: DEBUG nova.compute.manager [req-7aa4720c-e3d0-4013-bef4-ca25cba4541f req-ab57ed0b-179c-49c7-bc92-97bce1503127 service nova] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Detach interface failed, port_id=83b1b299-d863-45f1-9a11-2ffa0e2bd291, reason: Instance 8d22d555-f837-4eb3-9474-c1434649584e could not be found. 
{{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2460.425669] env[62684]: DEBUG oslo_concurrency.lockutils [None req-94e4bfcf-f937-484b-b83b-eb0c323eb5de tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "603b2c96-44f1-45a7-8209-b799662a3e42" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.208s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2460.580260] env[62684]: INFO nova.compute.manager [-] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Took 1.43 seconds to deallocate network for instance. [ 2461.120947] env[62684]: INFO nova.compute.manager [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Took 0.54 seconds to detach 1 volumes for instance. [ 2461.123718] env[62684]: DEBUG nova.compute.manager [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Deleting volume: c148a6a9-f313-4a0d-8466-261a9903a3c8 {{(pid=62684) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 2461.455997] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "603b2c96-44f1-45a7-8209-b799662a3e42" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2461.456258] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "603b2c96-44f1-45a7-8209-b799662a3e42" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2461.456447] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "603b2c96-44f1-45a7-8209-b799662a3e42-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2461.456640] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "603b2c96-44f1-45a7-8209-b799662a3e42-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2461.456817] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "603b2c96-44f1-45a7-8209-b799662a3e42-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2461.458890] env[62684]: INFO nova.compute.manager [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Terminating instance [ 2461.460580] env[62684]: DEBUG nova.compute.manager [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2461.460777] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2461.461638] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92d83f5e-3b71-469d-af46-7d086f64a097 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2461.469308] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2461.469531] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-09b66309-d236-462c-a479-69b2b3425afa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2461.474904] env[62684]: DEBUG oslo_vmware.api [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2461.474904] env[62684]: value = "task-2053906" [ 2461.474904] env[62684]: _type = "Task" [ 2461.474904] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2461.481664] env[62684]: DEBUG oslo_vmware.api [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053906, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2461.509574] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f7113e09-b9c9-46a9-99a7-8e19dcae3b73 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "4081f322-a854-475a-9a66-3d573128f39d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2461.509961] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f7113e09-b9c9-46a9-99a7-8e19dcae3b73 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "4081f322-a854-475a-9a66-3d573128f39d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2461.510229] env[62684]: DEBUG nova.compute.manager [None req-f7113e09-b9c9-46a9-99a7-8e19dcae3b73 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2461.511097] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-033e7c17-b97d-408e-9b35-2d01f1e5f805 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2461.517697] env[62684]: DEBUG nova.compute.manager [None req-f7113e09-b9c9-46a9-99a7-8e19dcae3b73 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62684) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 2461.518293] env[62684]: DEBUG nova.objects.instance [None req-f7113e09-b9c9-46a9-99a7-8e19dcae3b73 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lazy-loading 'flavor' on Instance uuid 4081f322-a854-475a-9a66-3d573128f39d {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2461.660065] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2461.660378] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2461.660611] env[62684]: DEBUG nova.objects.instance [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lazy-loading 'resources' on Instance uuid 8d22d555-f837-4eb3-9474-c1434649584e {{(pid=62684) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 2461.984844] env[62684]: DEBUG oslo_vmware.api [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053906, 'name': PowerOffVM_Task, 'duration_secs': 0.165157} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2461.985167] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2461.985374] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2461.985633] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6abe54c0-0ffb-4716-8860-ade95b30048b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.024057] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7113e09-b9c9-46a9-99a7-8e19dcae3b73 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2462.024057] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d0bc348e-7609-4067-9d53-0755a9c13e5d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.029635] env[62684]: DEBUG oslo_vmware.api [None req-f7113e09-b9c9-46a9-99a7-8e19dcae3b73 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2462.029635] env[62684]: value = "task-2053908" [ 2462.029635] env[62684]: _type = "Task" [ 2462.029635] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2462.037900] env[62684]: DEBUG oslo_vmware.api [None req-f7113e09-b9c9-46a9-99a7-8e19dcae3b73 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053908, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2462.172147] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2462.172527] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2462.172527] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Deleting the datastore file [datastore1] 603b2c96-44f1-45a7-8209-b799662a3e42 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2462.173910] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5ad53d41-5941-4850-8152-0b055d825b84 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.179168] env[62684]: DEBUG oslo_vmware.api [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2462.179168] env[62684]: value = "task-2053909" [ 2462.179168] env[62684]: _type = "Task" [ 2462.179168] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2462.189853] env[62684]: DEBUG oslo_vmware.api [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053909, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2462.248550] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7ada0af-2ab8-4624-847d-41c12d6114c2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.255786] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffd721ea-59fc-4cef-8d96-c2d816c038a8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.292687] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d7849c8-1e81-4af9-bcf2-56c4724ec39d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.300236] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a29a116f-8f43-4f1a-a197-8a61d693659f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.313331] env[62684]: DEBUG nova.compute.provider_tree [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2462.539805] env[62684]: DEBUG oslo_vmware.api [None req-f7113e09-b9c9-46a9-99a7-8e19dcae3b73 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053908, 'name': PowerOffVM_Task, 'duration_secs': 0.186662} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2462.540031] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7113e09-b9c9-46a9-99a7-8e19dcae3b73 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2462.540211] env[62684]: DEBUG nova.compute.manager [None req-f7113e09-b9c9-46a9-99a7-8e19dcae3b73 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2462.541029] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-248a2f81-f38f-469f-bb42-a20c0346ef40 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.688128] env[62684]: DEBUG oslo_vmware.api [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053909, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142571} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2462.688399] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2462.688595] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2462.688781] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2462.688959] env[62684]: INFO nova.compute.manager [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Took 1.23 seconds to destroy the instance on the hypervisor. [ 2462.689226] env[62684]: DEBUG oslo.service.loopingcall [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2462.689426] env[62684]: DEBUG nova.compute.manager [-] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2462.689523] env[62684]: DEBUG nova.network.neutron [-] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2462.816448] env[62684]: DEBUG nova.scheduler.client.report [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2463.051919] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f7113e09-b9c9-46a9-99a7-8e19dcae3b73 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "4081f322-a854-475a-9a66-3d573128f39d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.542s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2463.123547] env[62684]: DEBUG nova.compute.manager [req-27057b88-65a7-4f88-8527-d3f930e2d9da req-16c57a44-0ff7-48b8-8456-ad01fed7ecb0 service nova] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Received event network-vif-deleted-cc3f2e6e-2b58-4d5c-a8a3-5c8f5d710d31 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2463.123547] env[62684]: INFO nova.compute.manager [req-27057b88-65a7-4f88-8527-d3f930e2d9da req-16c57a44-0ff7-48b8-8456-ad01fed7ecb0 service nova] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Neutron deleted interface cc3f2e6e-2b58-4d5c-a8a3-5c8f5d710d31; detaching it from the instance and deleting it from the info cache [ 2463.123547] env[62684]: DEBUG nova.network.neutron [req-27057b88-65a7-4f88-8527-d3f930e2d9da req-16c57a44-0ff7-48b8-8456-ad01fed7ecb0 service nova] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2463.324058] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.664s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2463.351828] env[62684]: INFO nova.scheduler.client.report [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Deleted allocations for instance 8d22d555-f837-4eb3-9474-c1434649584e [ 2463.602701] env[62684]: DEBUG nova.network.neutron [-] [instance: 
603b2c96-44f1-45a7-8209-b799662a3e42] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2463.625802] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9542d086-ff10-4dac-a2bc-0f51a72840a4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2463.636570] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f82e6af1-1c83-4886-a12e-ea730fd5142e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2463.664140] env[62684]: DEBUG nova.compute.manager [req-27057b88-65a7-4f88-8527-d3f930e2d9da req-16c57a44-0ff7-48b8-8456-ad01fed7ecb0 service nova] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Detach interface failed, port_id=cc3f2e6e-2b58-4d5c-a8a3-5c8f5d710d31, reason: Instance 603b2c96-44f1-45a7-8209-b799662a3e42 could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2463.860367] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b58dfcad-bdca-4d3b-b1f3-ed4be2140807 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "8d22d555-f837-4eb3-9474-c1434649584e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.288s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2463.961684] env[62684]: DEBUG nova.objects.instance [None req-1f63f1b2-63bf-4398-8c89-1b25e6479932 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lazy-loading 'flavor' on Instance uuid 4081f322-a854-475a-9a66-3d573128f39d {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2464.105043] env[62684]: INFO nova.compute.manager [-] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Took 1.41 seconds to deallocate network for instance. 
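The entries above trace one complete teardown path: a per-instance lock is held around the terminate call, the guest is powered off through a VMware PowerOffVM_Task that is polled to completion, the VM is unregistered, its datastore files are deleted, and the network is deallocated while the resource tracker refreshes placement inventory. As an orientation aid only, the sketch below reproduces the two library patterns that generate most of these DEBUG lines (oslo.concurrency's named-lock logging and oslo.vmware's task invocation/polling). It is not Nova source: the host, credentials, retry count, poll interval and managed-object value are placeholders, and build_session/power_off_instance are illustrative helper names, not functions from this log.

# Minimal sketch, assuming oslo.concurrency and oslo.vmware are installed;
# all connection parameters below are placeholders, not values from this log.
from oslo_concurrency import lockutils
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util


def build_session(host, user, password):
    # Opens an authenticated vCenter session; api_retry_count=10 and a 0.5s
    # task_poll_interval, which sets the cadence of the task progress polling.
    return vmware_api.VMwareAPISession(host, user, password, 10, 0.5)


def power_off_instance(session, instance_uuid, vm_moref_value):
    # The inner function mirrors the do_terminate_instance / do_stop_instance
    # pattern visible in the lock messages: the decorator emits the
    # "Acquiring lock ... by ..." / "acquired ... waited" / "released ... held"
    # DEBUG lines, keyed on the instance UUID.
    @lockutils.synchronized(instance_uuid)
    def _do_power_off():
        # Build a VirtualMachine moref from its string value (placeholder here).
        vm_ref = vim_util.get_moref(vm_moref_value, 'VirtualMachine')
        # invoke_api() issues the SOAP call logged as
        # "Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-...".
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task() polls the task until it reports success or raises,
        # producing the "progress is 0%" ... "completed successfully" pair.
        return session.wait_for_task(task)

    return _do_power_off()

Usage would be along the lines of power_off_instance(build_session('vc.example.test', 'user', 'secret'), '603b2c96-44f1-45a7-8209-b799662a3e42', 'vm-12345'), where 'vm-12345' stands in for a real VirtualMachine moref value; the same invoke_api/wait_for_task pairing underlies the UnregisterVM and DeleteDatastoreFile_Task steps that follow in the log.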
[ 2464.467491] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1f63f1b2-63bf-4398-8c89-1b25e6479932 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "refresh_cache-4081f322-a854-475a-9a66-3d573128f39d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2464.467813] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1f63f1b2-63bf-4398-8c89-1b25e6479932 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired lock "refresh_cache-4081f322-a854-475a-9a66-3d573128f39d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2464.467813] env[62684]: DEBUG nova.network.neutron [None req-1f63f1b2-63bf-4398-8c89-1b25e6479932 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2464.467931] env[62684]: DEBUG nova.objects.instance [None req-1f63f1b2-63bf-4398-8c89-1b25e6479932 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lazy-loading 'info_cache' on Instance uuid 4081f322-a854-475a-9a66-3d573128f39d {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2464.610620] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2464.610896] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2464.611530] env[62684]: DEBUG nova.objects.instance [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lazy-loading 'resources' on Instance uuid 603b2c96-44f1-45a7-8209-b799662a3e42 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2464.646162] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "b0ddbec0-d578-46df-93fd-9d38c939bd77" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2464.646406] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "b0ddbec0-d578-46df-93fd-9d38c939bd77" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2464.646609] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "b0ddbec0-d578-46df-93fd-9d38c939bd77-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2464.646795] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "b0ddbec0-d578-46df-93fd-9d38c939bd77-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2464.646964] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "b0ddbec0-d578-46df-93fd-9d38c939bd77-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2464.648937] env[62684]: INFO nova.compute.manager [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Terminating instance [ 2464.650589] env[62684]: DEBUG nova.compute.manager [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2464.650785] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2464.651614] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b180ecc2-2f05-4a48-b534-277c2c93a4e9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2464.660852] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2464.661085] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dfed16b5-7d21-4519-ad7a-ae25f69edccc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2464.667092] env[62684]: DEBUG oslo_vmware.api [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2464.667092] env[62684]: value = "task-2053910" [ 2464.667092] env[62684]: _type = "Task" [ 2464.667092] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2464.674886] env[62684]: DEBUG oslo_vmware.api [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053910, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2464.971431] env[62684]: DEBUG nova.objects.base [None req-1f63f1b2-63bf-4398-8c89-1b25e6479932 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Object Instance<4081f322-a854-475a-9a66-3d573128f39d> lazy-loaded attributes: flavor,info_cache {{(pid=62684) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2465.173857] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36165c29-8caf-44e3-ae6c-4ed480eb0569 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2465.179385] env[62684]: DEBUG oslo_vmware.api [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053910, 'name': PowerOffVM_Task, 'duration_secs': 0.230029} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2465.180026] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2465.180277] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2465.180530] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-faf1d8ac-d0ba-40b1-ba0c-c16b6a3b2b59 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2465.184481] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4350eaef-cf4c-4239-9763-782e41a88bab {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2465.213138] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7915be8d-eb9f-43b8-a9f6-19bcd00f1077 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2465.220128] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c2f35e-020b-468a-a155-723e20734da5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2465.234020] env[62684]: DEBUG nova.compute.provider_tree [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2465.290415] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2465.290671] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2465.290875] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Deleting the datastore file [datastore2] b0ddbec0-d578-46df-93fd-9d38c939bd77 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2465.291160] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-56186693-66ea-402b-b61c-374505ed3a0f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2465.297297] env[62684]: DEBUG oslo_vmware.api [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2465.297297] env[62684]: value = "task-2053912" [ 2465.297297] env[62684]: _type = "Task" [ 2465.297297] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2465.304792] env[62684]: DEBUG oslo_vmware.api [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053912, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2465.681634] env[62684]: DEBUG nova.network.neutron [None req-1f63f1b2-63bf-4398-8c89-1b25e6479932 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Updating instance_info_cache with network_info: [{"id": "c3974c79-c5f4-4ea4-88b0-b0e0a4e63b1f", "address": "fa:16:3e:09:e0:b3", "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-120070593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "263c101fcc5e493789b79dfd1ba97cc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3974c79-c5", "ovs_interfaceid": "c3974c79-c5f4-4ea4-88b0-b0e0a4e63b1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2465.736740] env[62684]: DEBUG nova.scheduler.client.report [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2465.807413] env[62684]: DEBUG oslo_vmware.api [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 
tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053912, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140455} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2465.807616] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2465.807807] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2465.807991] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2465.808190] env[62684]: INFO nova.compute.manager [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Took 1.16 seconds to destroy the instance on the hypervisor. [ 2465.808427] env[62684]: DEBUG oslo.service.loopingcall [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2465.808612] env[62684]: DEBUG nova.compute.manager [-] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2465.808708] env[62684]: DEBUG nova.network.neutron [-] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2466.047696] env[62684]: DEBUG nova.compute.manager [req-07d70c72-17bc-408b-87a2-72e8299a8573 req-ea406bee-abc2-4084-b13f-d8c4b633d0fb service nova] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Received event network-vif-deleted-47cf42e1-cff0-4bcb-9b54-03051121bd6b {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2466.048042] env[62684]: INFO nova.compute.manager [req-07d70c72-17bc-408b-87a2-72e8299a8573 req-ea406bee-abc2-4084-b13f-d8c4b633d0fb service nova] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Neutron deleted interface 47cf42e1-cff0-4bcb-9b54-03051121bd6b; detaching it from the instance and deleting it from the info cache [ 2466.048095] env[62684]: DEBUG nova.network.neutron [req-07d70c72-17bc-408b-87a2-72e8299a8573 req-ea406bee-abc2-4084-b13f-d8c4b633d0fb service nova] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2466.184708] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1f63f1b2-63bf-4398-8c89-1b25e6479932 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Releasing lock "refresh_cache-4081f322-a854-475a-9a66-3d573128f39d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2466.243380] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.632s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2466.266803] env[62684]: INFO nova.scheduler.client.report [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Deleted allocations for instance 603b2c96-44f1-45a7-8209-b799662a3e42 [ 2466.527723] env[62684]: DEBUG nova.network.neutron [-] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2466.550824] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-87eca66c-3216-48a2-a796-05e94882651e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2466.560347] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b10fff8-0097-408a-9307-1d7ea45e6fe9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2466.585454] env[62684]: DEBUG nova.compute.manager [req-07d70c72-17bc-408b-87a2-72e8299a8573 
req-ea406bee-abc2-4084-b13f-d8c4b633d0fb service nova] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Detach interface failed, port_id=47cf42e1-cff0-4bcb-9b54-03051121bd6b, reason: Instance b0ddbec0-d578-46df-93fd-9d38c939bd77 could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2466.687822] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f63f1b2-63bf-4398-8c89-1b25e6479932 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2466.688175] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a393ba18-67f7-493c-86f8-50a5763914fe {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2466.696874] env[62684]: DEBUG oslo_vmware.api [None req-1f63f1b2-63bf-4398-8c89-1b25e6479932 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2466.696874] env[62684]: value = "task-2053913" [ 2466.696874] env[62684]: _type = "Task" [ 2466.696874] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2466.705100] env[62684]: DEBUG oslo_vmware.api [None req-1f63f1b2-63bf-4398-8c89-1b25e6479932 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053913, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2466.776233] env[62684]: DEBUG oslo_concurrency.lockutils [None req-6c83dfd6-7081-44e0-98dd-452cf225b4e8 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "603b2c96-44f1-45a7-8209-b799662a3e42" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.320s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2467.031089] env[62684]: INFO nova.compute.manager [-] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Took 1.22 seconds to deallocate network for instance. [ 2467.208300] env[62684]: DEBUG oslo_vmware.api [None req-1f63f1b2-63bf-4398-8c89-1b25e6479932 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053913, 'name': PowerOnVM_Task, 'duration_secs': 0.441547} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2467.208686] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f63f1b2-63bf-4398-8c89-1b25e6479932 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2467.208952] env[62684]: DEBUG nova.compute.manager [None req-1f63f1b2-63bf-4398-8c89-1b25e6479932 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2467.209788] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31307bf4-9c43-4eea-ba7e-4a4bf0665f65 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2467.539692] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2467.540014] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2467.540299] env[62684]: DEBUG nova.objects.instance [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lazy-loading 'resources' on Instance uuid b0ddbec0-d578-46df-93fd-9d38c939bd77 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2468.100438] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e8985c7-29a9-407b-9aea-54e8b1e5b158 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2468.107904] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4251d5e-a4b1-49ea-9541-56a7c039780f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2468.138195] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bed978e-284b-4674-b42d-93e05afd6235 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2468.145800] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1587ef3-64ce-4ee2-922a-2b643fe7ae51 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2468.158756] env[62684]: DEBUG nova.compute.provider_tree [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 
tempest-ServerActionsTestOtherA-43335336-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2468.452159] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "84773cfa-edee-44bc-b89d-490d1fef5417" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2468.452431] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "84773cfa-edee-44bc-b89d-490d1fef5417" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2468.662108] env[62684]: DEBUG nova.scheduler.client.report [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2468.780378] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a709670c-8f2d-4339-b5dc-2f164c148ba0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2468.787211] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cb238be1-d3c2-4525-bc1f-98d89ce158cd tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Suspending the VM {{(pid=62684) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 2468.787497] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-6475d4e0-de8a-43af-90c9-ab9c23df5971 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2468.794185] env[62684]: DEBUG oslo_vmware.api [None req-cb238be1-d3c2-4525-bc1f-98d89ce158cd tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2468.794185] env[62684]: value = "task-2053914" [ 2468.794185] env[62684]: _type = "Task" [ 2468.794185] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2468.802256] env[62684]: DEBUG oslo_vmware.api [None req-cb238be1-d3c2-4525-bc1f-98d89ce158cd tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053914, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2468.955159] env[62684]: DEBUG nova.compute.manager [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2469.167019] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.627s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2469.188931] env[62684]: INFO nova.scheduler.client.report [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Deleted allocations for instance b0ddbec0-d578-46df-93fd-9d38c939bd77 [ 2469.304669] env[62684]: DEBUG oslo_vmware.api [None req-cb238be1-d3c2-4525-bc1f-98d89ce158cd tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053914, 'name': SuspendVM_Task} progress is 75%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2469.473574] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2469.473855] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2469.475480] env[62684]: INFO nova.compute.claims [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2469.697109] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a7218c7c-9ed1-4b2b-9bf0-1036a8988dd9 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "b0ddbec0-d578-46df-93fd-9d38c939bd77" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.050s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2469.804709] env[62684]: DEBUG oslo_vmware.api [None req-cb238be1-d3c2-4525-bc1f-98d89ce158cd tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053914, 'name': SuspendVM_Task, 'duration_secs': 0.66652} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2469.804961] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-cb238be1-d3c2-4525-bc1f-98d89ce158cd tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Suspended the VM {{(pid=62684) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 2469.805174] env[62684]: DEBUG nova.compute.manager [None req-cb238be1-d3c2-4525-bc1f-98d89ce158cd tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2469.805942] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b94ebe-725b-4147-8537-3b5d55060b2b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2470.529237] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64030eb4-7082-47a2-b183-0e6e24acc330 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2470.538477] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a18a9925-8cca-4f04-9c97-4eec3d508786 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2470.567215] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43a932af-a9b7-4ed4-b24a-b5fd7be1499e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2470.574285] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5834aead-ddf3-4fa2-abc9-9e7432be9d10 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2470.586957] env[62684]: DEBUG nova.compute.provider_tree [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2471.090059] env[62684]: DEBUG nova.scheduler.client.report [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2471.133104] env[62684]: INFO nova.compute.manager [None req-0f88f8a9-6ec7-49b3-801c-e7229b295add tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Resuming [ 2471.133785] 
env[62684]: DEBUG nova.objects.instance [None req-0f88f8a9-6ec7-49b3-801c-e7229b295add tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lazy-loading 'flavor' on Instance uuid 4081f322-a854-475a-9a66-3d573128f39d {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2471.595410] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.121s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2471.595956] env[62684]: DEBUG nova.compute.manager [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2472.101054] env[62684]: DEBUG nova.compute.utils [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2472.102368] env[62684]: DEBUG nova.compute.manager [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2472.102540] env[62684]: DEBUG nova.network.neutron [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2472.140709] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0f88f8a9-6ec7-49b3-801c-e7229b295add tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "refresh_cache-4081f322-a854-475a-9a66-3d573128f39d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2472.140875] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0f88f8a9-6ec7-49b3-801c-e7229b295add tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquired lock "refresh_cache-4081f322-a854-475a-9a66-3d573128f39d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2472.141064] env[62684]: DEBUG nova.network.neutron [None req-0f88f8a9-6ec7-49b3-801c-e7229b295add tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2472.148876] env[62684]: DEBUG nova.policy [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Policy check for network:attach_external_network 
failed with credentials {'is_admin': False, 'user_id': '58ea1db87d2b44408282a8b82d799443', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '947e7359aaba456fa1763f4dc8e9d359', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2472.406625] env[62684]: DEBUG nova.network.neutron [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Successfully created port: 23ff2d84-2b97-42ec-af87-1ac96cada09b {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2472.606213] env[62684]: DEBUG nova.compute.manager [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2472.926307] env[62684]: DEBUG nova.network.neutron [None req-0f88f8a9-6ec7-49b3-801c-e7229b295add tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Updating instance_info_cache with network_info: [{"id": "c3974c79-c5f4-4ea4-88b0-b0e0a4e63b1f", "address": "fa:16:3e:09:e0:b3", "network": {"id": "1751424b-54a9-4879-9f32-aa15a9bb632c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-120070593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "263c101fcc5e493789b79dfd1ba97cc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3974c79-c5", "ovs_interfaceid": "c3974c79-c5f4-4ea4-88b0-b0e0a4e63b1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2473.227868] env[62684]: DEBUG oslo_concurrency.lockutils [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "0156d807-1ab4-482f-91d1-172bf32bf23c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2473.228054] env[62684]: DEBUG oslo_concurrency.lockutils [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 
tempest-ServerActionsTestOtherA-43335336-project-member] Lock "0156d807-1ab4-482f-91d1-172bf32bf23c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2473.228317] env[62684]: DEBUG oslo_concurrency.lockutils [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "0156d807-1ab4-482f-91d1-172bf32bf23c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2473.228552] env[62684]: DEBUG oslo_concurrency.lockutils [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "0156d807-1ab4-482f-91d1-172bf32bf23c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2473.228743] env[62684]: DEBUG oslo_concurrency.lockutils [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "0156d807-1ab4-482f-91d1-172bf32bf23c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2473.230893] env[62684]: INFO nova.compute.manager [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Terminating instance [ 2473.232578] env[62684]: DEBUG nova.compute.manager [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2473.232774] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2473.233667] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28318a55-1d5f-450a-ba9c-dad6e8ebd2c6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2473.240926] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2473.241171] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-933f1134-e38c-49d3-92c5-2f170526e673 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2473.246546] env[62684]: DEBUG oslo_vmware.api [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2473.246546] env[62684]: value = "task-2053916" [ 2473.246546] env[62684]: _type = "Task" [ 2473.246546] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2473.254014] env[62684]: DEBUG oslo_vmware.api [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053916, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2473.429095] env[62684]: DEBUG oslo_concurrency.lockutils [None req-0f88f8a9-6ec7-49b3-801c-e7229b295add tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Releasing lock "refresh_cache-4081f322-a854-475a-9a66-3d573128f39d" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2473.430066] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d0ae40-3d1e-4dea-b124-61cf5cfe85c5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2473.436577] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0f88f8a9-6ec7-49b3-801c-e7229b295add tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Resuming the VM {{(pid=62684) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 2473.436807] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-67af2b84-2432-467f-ad68-7c6c2fb0ccc9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2473.442669] env[62684]: DEBUG oslo_vmware.api [None req-0f88f8a9-6ec7-49b3-801c-e7229b295add tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2473.442669] env[62684]: value = "task-2053917" [ 2473.442669] env[62684]: _type = "Task" [ 2473.442669] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2473.450946] env[62684]: DEBUG oslo_vmware.api [None req-0f88f8a9-6ec7-49b3-801c-e7229b295add tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053917, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2473.615831] env[62684]: DEBUG nova.compute.manager [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2473.642041] env[62684]: DEBUG nova.virt.hardware [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2473.642336] env[62684]: DEBUG nova.virt.hardware [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2473.642509] env[62684]: DEBUG nova.virt.hardware [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2473.642698] env[62684]: DEBUG nova.virt.hardware [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2473.642849] env[62684]: DEBUG nova.virt.hardware [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2473.643008] env[62684]: DEBUG nova.virt.hardware [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2473.643246] env[62684]: DEBUG nova.virt.hardware [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2473.643431] env[62684]: DEBUG nova.virt.hardware [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2473.643611] env[62684]: DEBUG 
nova.virt.hardware [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2473.643781] env[62684]: DEBUG nova.virt.hardware [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2473.643960] env[62684]: DEBUG nova.virt.hardware [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2473.644841] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ccea0e-3f72-4d00-86b8-fc2f9ef69e0e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2473.652140] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-319fab40-63a4-47f9-a45b-98d9f1ff5fd3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2473.756126] env[62684]: DEBUG oslo_vmware.api [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053916, 'name': PowerOffVM_Task, 'duration_secs': 0.228324} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2473.756439] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2473.756738] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2473.756907] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b502a5ae-1247-47e4-b1e9-368bbc977c7f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2473.954381] env[62684]: DEBUG oslo_vmware.api [None req-0f88f8a9-6ec7-49b3-801c-e7229b295add tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053917, 'name': PowerOnVM_Task, 'duration_secs': 0.498256} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2473.954732] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-0f88f8a9-6ec7-49b3-801c-e7229b295add tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Resumed the VM {{(pid=62684) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 2473.954905] env[62684]: DEBUG nova.compute.manager [None req-0f88f8a9-6ec7-49b3-801c-e7229b295add tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2473.955712] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29166c62-003c-426c-998c-537da4ec241d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2474.762562] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "4081f322-a854-475a-9a66-3d573128f39d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2474.762936] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "4081f322-a854-475a-9a66-3d573128f39d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2474.763071] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "4081f322-a854-475a-9a66-3d573128f39d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2474.763292] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "4081f322-a854-475a-9a66-3d573128f39d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2474.763459] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "4081f322-a854-475a-9a66-3d573128f39d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2474.765557] env[62684]: INFO nova.compute.manager [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 
4081f322-a854-475a-9a66-3d573128f39d] Terminating instance [ 2474.767260] env[62684]: DEBUG nova.compute.manager [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2474.767462] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2474.768309] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f906d2-5799-45df-9fed-96a00d56c367 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2474.775985] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2474.776224] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1b61edee-ae99-4be7-996a-4665a7f0537f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2474.781632] env[62684]: DEBUG oslo_vmware.api [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2474.781632] env[62684]: value = "task-2053919" [ 2474.781632] env[62684]: _type = "Task" [ 2474.781632] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2474.789385] env[62684]: DEBUG oslo_vmware.api [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053919, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2475.291253] env[62684]: DEBUG oslo_vmware.api [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053919, 'name': PowerOffVM_Task, 'duration_secs': 0.185816} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2475.291988] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2475.292234] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2475.292508] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0ceb7b84-a2be-444c-81f1-da0e86800770 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.178143] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2479.178143] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2479.178143] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Deleting the datastore file [datastore1] 4081f322-a854-475a-9a66-3d573128f39d {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2479.178782] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a9d55fb9-faa4-4648-83a2-a84faad01fb3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.185646] env[62684]: DEBUG oslo_vmware.api [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for the task: (returnval){ [ 2479.185646] env[62684]: value = "task-2053921" [ 2479.185646] env[62684]: _type = "Task" [ 2479.185646] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2479.191709] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2479.191929] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2479.192124] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Deleting the datastore file [datastore1] 0156d807-1ab4-482f-91d1-172bf32bf23c {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2479.195134] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d5529df9-f85e-4b6e-adac-6377a9692ba7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.196862] env[62684]: DEBUG oslo_vmware.api [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053921, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2479.200911] env[62684]: DEBUG oslo_vmware.api [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for the task: (returnval){ [ 2479.200911] env[62684]: value = "task-2053922" [ 2479.200911] env[62684]: _type = "Task" [ 2479.200911] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2479.208753] env[62684]: DEBUG oslo_vmware.api [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053922, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2479.695572] env[62684]: DEBUG oslo_vmware.api [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Task: {'id': task-2053921, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163895} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2479.695866] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2479.696068] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2479.696257] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2479.696440] env[62684]: INFO nova.compute.manager [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Took 4.93 seconds to destroy the instance on the hypervisor. [ 2479.696694] env[62684]: DEBUG oslo.service.loopingcall [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2479.696889] env[62684]: DEBUG nova.compute.manager [-] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2479.696984] env[62684]: DEBUG nova.network.neutron [-] [instance: 4081f322-a854-475a-9a66-3d573128f39d] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2479.709422] env[62684]: DEBUG oslo_vmware.api [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Task: {'id': task-2053922, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151493} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2479.709614] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2479.709789] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2479.709963] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2479.710151] env[62684]: INFO nova.compute.manager [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Took 6.48 seconds to destroy the instance on the hypervisor. [ 2479.710376] env[62684]: DEBUG oslo.service.loopingcall [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2479.710559] env[62684]: DEBUG nova.compute.manager [-] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2479.710653] env[62684]: DEBUG nova.network.neutron [-] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2480.048617] env[62684]: DEBUG nova.compute.manager [req-492d4654-a9da-4954-bec1-3c6b146acf42 req-77b92b13-a1df-4300-9b68-ab9b3e4ddb5f service nova] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Received event network-vif-deleted-f5c06971-b96a-4fa0-858e-5e47100e2e68 {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2480.048822] env[62684]: INFO nova.compute.manager [req-492d4654-a9da-4954-bec1-3c6b146acf42 req-77b92b13-a1df-4300-9b68-ab9b3e4ddb5f service nova] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Neutron deleted interface f5c06971-b96a-4fa0-858e-5e47100e2e68; detaching it from the instance and deleting it from the info cache [ 2480.048996] env[62684]: DEBUG nova.network.neutron [req-492d4654-a9da-4954-bec1-3c6b146acf42 req-77b92b13-a1df-4300-9b68-ab9b3e4ddb5f service nova] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2480.291208] env[62684]: DEBUG nova.compute.manager [req-6cb0ff75-f407-444f-a17e-7bc5c0f59f6f req-519cb9a6-dc12-4c96-ad11-121f446c1be4 service nova] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Received event network-vif-plugged-23ff2d84-2b97-42ec-af87-1ac96cada09b {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2480.291603] env[62684]: DEBUG oslo_concurrency.lockutils [req-6cb0ff75-f407-444f-a17e-7bc5c0f59f6f req-519cb9a6-dc12-4c96-ad11-121f446c1be4 service nova] Acquiring lock "84773cfa-edee-44bc-b89d-490d1fef5417-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2480.291648] env[62684]: DEBUG oslo_concurrency.lockutils [req-6cb0ff75-f407-444f-a17e-7bc5c0f59f6f req-519cb9a6-dc12-4c96-ad11-121f446c1be4 service nova] Lock "84773cfa-edee-44bc-b89d-490d1fef5417-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2480.291917] env[62684]: DEBUG oslo_concurrency.lockutils [req-6cb0ff75-f407-444f-a17e-7bc5c0f59f6f req-519cb9a6-dc12-4c96-ad11-121f446c1be4 service nova] Lock "84773cfa-edee-44bc-b89d-490d1fef5417-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2480.292015] env[62684]: DEBUG nova.compute.manager [req-6cb0ff75-f407-444f-a17e-7bc5c0f59f6f req-519cb9a6-dc12-4c96-ad11-121f446c1be4 service nova] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] No waiting events found dispatching network-vif-plugged-23ff2d84-2b97-42ec-af87-1ac96cada09b {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2480.292212] env[62684]: 
WARNING nova.compute.manager [req-6cb0ff75-f407-444f-a17e-7bc5c0f59f6f req-519cb9a6-dc12-4c96-ad11-121f446c1be4 service nova] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Received unexpected event network-vif-plugged-23ff2d84-2b97-42ec-af87-1ac96cada09b for instance with vm_state building and task_state spawning. [ 2480.386785] env[62684]: DEBUG nova.network.neutron [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Successfully updated port: 23ff2d84-2b97-42ec-af87-1ac96cada09b {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2480.522104] env[62684]: DEBUG nova.network.neutron [-] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2480.552016] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8e8942ef-ad8b-49bf-b536-897f82a543a1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2480.562171] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccca3110-0e13-4873-b92d-83b27b8c88a7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2480.585393] env[62684]: DEBUG nova.compute.manager [req-492d4654-a9da-4954-bec1-3c6b146acf42 req-77b92b13-a1df-4300-9b68-ab9b3e4ddb5f service nova] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Detach interface failed, port_id=f5c06971-b96a-4fa0-858e-5e47100e2e68, reason: Instance 0156d807-1ab4-482f-91d1-172bf32bf23c could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2480.795075] env[62684]: DEBUG nova.network.neutron [-] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2480.893074] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "refresh_cache-84773cfa-edee-44bc-b89d-490d1fef5417" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2480.893074] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquired lock "refresh_cache-84773cfa-edee-44bc-b89d-490d1fef5417" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2480.893074] env[62684]: DEBUG nova.network.neutron [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2481.024673] env[62684]: INFO nova.compute.manager [-] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Took 1.31 seconds to deallocate network for instance. 
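The repeated 'Acquiring lock ... / acquired ... waited N.NNNs / "released" ... held N.NNNs' triples throughout this log are emitted by oslo_concurrency.lockutils, which serializes callers on a named lock and reports how long each one waited for it and held it. Below is a minimal sketch of that pattern using only the public oslo.concurrency API; the lock name mirrors the log, but the guarded functions and the instance UUID argument are illustrative placeholders, not Nova code.

from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid):
    # Body runs with the named lock held; concurrent callers block in the
    # decorator wrapper, which is what the "waited N.NNNs" figures above measure.
    print(f"claiming resources for {instance_uuid}")


def update_usage(instance_uuid):
    # Equivalent context-manager form of the same lock, as used for the
    # "refresh_cache-..." locks elsewhere in this log.
    with lockutils.lock("compute_resources"):
        print(f"updating usage for {instance_uuid}")


claim_resources("84773cfa-edee-44bc-b89d-490d1fef5417")
update_usage("84773cfa-edee-44bc-b89d-490d1fef5417")

Both forms log the acquire/wait/held timings at DEBUG level, which is where figures such as 'held 1.627s' above originate.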
[ 2481.297824] env[62684]: INFO nova.compute.manager [-] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Took 1.60 seconds to deallocate network for instance. [ 2481.425060] env[62684]: DEBUG nova.network.neutron [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2481.532490] env[62684]: DEBUG oslo_concurrency.lockutils [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2481.532824] env[62684]: DEBUG oslo_concurrency.lockutils [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2481.533140] env[62684]: DEBUG nova.objects.instance [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lazy-loading 'resources' on Instance uuid 0156d807-1ab4-482f-91d1-172bf32bf23c {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2481.722252] env[62684]: DEBUG nova.network.neutron [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Updating instance_info_cache with network_info: [{"id": "23ff2d84-2b97-42ec-af87-1ac96cada09b", "address": "fa:16:3e:29:a8:1f", "network": {"id": "8136a664-f757-43b3-a2fa-bacdf2e9566c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1799567463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947e7359aaba456fa1763f4dc8e9d359", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cc30a16-f070-421c-964e-50c9aa32f17a", "external-id": "nsx-vlan-transportzone-424", "segmentation_id": 424, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23ff2d84-2b", "ovs_interfaceid": "23ff2d84-2b97-42ec-af87-1ac96cada09b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2481.804758] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2482.073317] env[62684]: DEBUG nova.compute.manager [req-d426a147-e9b5-4e65-a84e-95ccfc08345c req-2c90f91d-e5e2-4d9d-a26b-5d9eaa8228ed service nova] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Received event network-vif-deleted-c3974c79-c5f4-4ea4-88b0-b0e0a4e63b1f {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2482.098303] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afbe85a0-c603-4769-babb-c17a8d96c410 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2482.106074] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78b6a7cd-3feb-4827-8b25-a72d2085796b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2482.136188] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb433504-399b-4d33-bc4f-b0b5d4dccb4d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2482.143604] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-223bed0d-de52-4e74-ae54-6fed290cece5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2482.156751] env[62684]: DEBUG nova.compute.provider_tree [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2482.225591] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Releasing lock "refresh_cache-84773cfa-edee-44bc-b89d-490d1fef5417" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2482.225986] env[62684]: DEBUG nova.compute.manager [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Instance network_info: |[{"id": "23ff2d84-2b97-42ec-af87-1ac96cada09b", "address": "fa:16:3e:29:a8:1f", "network": {"id": "8136a664-f757-43b3-a2fa-bacdf2e9566c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1799567463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947e7359aaba456fa1763f4dc8e9d359", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cc30a16-f070-421c-964e-50c9aa32f17a", "external-id": "nsx-vlan-transportzone-424", "segmentation_id": 424, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap23ff2d84-2b", "ovs_interfaceid": "23ff2d84-2b97-42ec-af87-1ac96cada09b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2482.227022] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:a8:1f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cc30a16-f070-421c-964e-50c9aa32f17a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '23ff2d84-2b97-42ec-af87-1ac96cada09b', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2482.234275] env[62684]: DEBUG oslo.service.loopingcall [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2482.234789] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2482.235056] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ba67994b-0a6f-4e9c-b254-ce177cf99391 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2482.255273] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2482.255273] env[62684]: value = "task-2053923" [ 2482.255273] env[62684]: _type = "Task" [ 2482.255273] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2482.267947] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053923, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2482.316779] env[62684]: DEBUG nova.compute.manager [req-95bca5fe-1d39-428f-86c6-a1e61a29109b req-3e41f16c-1865-4288-b221-60d30307ffb2 service nova] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Received event network-changed-23ff2d84-2b97-42ec-af87-1ac96cada09b {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2482.317033] env[62684]: DEBUG nova.compute.manager [req-95bca5fe-1d39-428f-86c6-a1e61a29109b req-3e41f16c-1865-4288-b221-60d30307ffb2 service nova] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Refreshing instance network info cache due to event network-changed-23ff2d84-2b97-42ec-af87-1ac96cada09b. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2482.317278] env[62684]: DEBUG oslo_concurrency.lockutils [req-95bca5fe-1d39-428f-86c6-a1e61a29109b req-3e41f16c-1865-4288-b221-60d30307ffb2 service nova] Acquiring lock "refresh_cache-84773cfa-edee-44bc-b89d-490d1fef5417" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2482.317361] env[62684]: DEBUG oslo_concurrency.lockutils [req-95bca5fe-1d39-428f-86c6-a1e61a29109b req-3e41f16c-1865-4288-b221-60d30307ffb2 service nova] Acquired lock "refresh_cache-84773cfa-edee-44bc-b89d-490d1fef5417" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2482.317531] env[62684]: DEBUG nova.network.neutron [req-95bca5fe-1d39-428f-86c6-a1e61a29109b req-3e41f16c-1865-4288-b221-60d30307ffb2 service nova] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Refreshing network info cache for port 23ff2d84-2b97-42ec-af87-1ac96cada09b {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2482.661070] env[62684]: DEBUG nova.scheduler.client.report [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2482.765375] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053923, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2483.012085] env[62684]: DEBUG nova.network.neutron [req-95bca5fe-1d39-428f-86c6-a1e61a29109b req-3e41f16c-1865-4288-b221-60d30307ffb2 service nova] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Updated VIF entry in instance network info cache for port 23ff2d84-2b97-42ec-af87-1ac96cada09b. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2483.012467] env[62684]: DEBUG nova.network.neutron [req-95bca5fe-1d39-428f-86c6-a1e61a29109b req-3e41f16c-1865-4288-b221-60d30307ffb2 service nova] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Updating instance_info_cache with network_info: [{"id": "23ff2d84-2b97-42ec-af87-1ac96cada09b", "address": "fa:16:3e:29:a8:1f", "network": {"id": "8136a664-f757-43b3-a2fa-bacdf2e9566c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1799567463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947e7359aaba456fa1763f4dc8e9d359", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cc30a16-f070-421c-964e-50c9aa32f17a", "external-id": "nsx-vlan-transportzone-424", "segmentation_id": 424, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23ff2d84-2b", "ovs_interfaceid": "23ff2d84-2b97-42ec-af87-1ac96cada09b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2483.165224] env[62684]: DEBUG oslo_concurrency.lockutils [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.632s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2483.168029] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.363s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2483.168029] env[62684]: DEBUG nova.objects.instance [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lazy-loading 'resources' on Instance uuid 4081f322-a854-475a-9a66-3d573128f39d {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2483.182462] env[62684]: INFO nova.scheduler.client.report [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Deleted allocations for instance 0156d807-1ab4-482f-91d1-172bf32bf23c [ 2483.267061] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053923, 'name': CreateVM_Task, 'duration_secs': 0.512494} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2483.267061] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2483.267061] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2483.267061] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2483.267380] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2483.267575] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-609acf96-5e0b-4735-8277-36ec5313d9ce {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2483.272016] env[62684]: DEBUG oslo_vmware.api [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2483.272016] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f751fb-d61b-a62d-85b7-c4cd0504e889" [ 2483.272016] env[62684]: _type = "Task" [ 2483.272016] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2483.278958] env[62684]: DEBUG oslo_vmware.api [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f751fb-d61b-a62d-85b7-c4cd0504e889, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2483.515628] env[62684]: DEBUG oslo_concurrency.lockutils [req-95bca5fe-1d39-428f-86c6-a1e61a29109b req-3e41f16c-1865-4288-b221-60d30307ffb2 service nova] Releasing lock "refresh_cache-84773cfa-edee-44bc-b89d-490d1fef5417" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2483.689138] env[62684]: DEBUG oslo_concurrency.lockutils [None req-677135a0-ad6c-4c13-aeba-1bec27414e75 tempest-ServerActionsTestOtherA-43335336 tempest-ServerActionsTestOtherA-43335336-project-member] Lock "0156d807-1ab4-482f-91d1-172bf32bf23c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.461s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2483.708564] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e850ad78-757a-4ed6-92ca-f337d948ad6a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2483.716770] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b334f6-f752-4087-9bac-58aaea16a7c8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2483.745865] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-128d4835-a39f-4f92-828c-8fe1a398a769 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2483.752964] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc344956-a0a3-4175-9bec-ab8390904362 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2483.766308] env[62684]: DEBUG nova.compute.provider_tree [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2483.782045] env[62684]: DEBUG oslo_vmware.api [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f751fb-d61b-a62d-85b7-c4cd0504e889, 'name': SearchDatastore_Task, 'duration_secs': 0.009861} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2483.782045] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2483.782045] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2483.782045] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2483.782045] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2483.782305] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2483.782351] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-08e92258-3121-41bd-b2a6-99949c61a07b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2483.791869] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2483.792058] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2483.792756] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24ebc95a-2dea-4e73-9b92-df164e458fcc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2483.797303] env[62684]: DEBUG oslo_vmware.api [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2483.797303] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5207970c-58c1-a13d-21f7-d01d082229c5" [ 2483.797303] env[62684]: _type = "Task" [ 2483.797303] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2483.804707] env[62684]: DEBUG oslo_vmware.api [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5207970c-58c1-a13d-21f7-d01d082229c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2484.269475] env[62684]: DEBUG nova.scheduler.client.report [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2484.307642] env[62684]: DEBUG oslo_vmware.api [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5207970c-58c1-a13d-21f7-d01d082229c5, 'name': SearchDatastore_Task, 'duration_secs': 0.008716} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2484.308398] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e213167e-b967-4466-b899-6fc708b46c2a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2484.313354] env[62684]: DEBUG oslo_vmware.api [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2484.313354] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52077209-87ab-76c8-b6f8-2a769df01a47" [ 2484.313354] env[62684]: _type = "Task" [ 2484.313354] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2484.321272] env[62684]: DEBUG oslo_vmware.api [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52077209-87ab-76c8-b6f8-2a769df01a47, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2484.774612] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.607s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2484.802057] env[62684]: INFO nova.scheduler.client.report [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Deleted allocations for instance 4081f322-a854-475a-9a66-3d573128f39d [ 2484.832199] env[62684]: DEBUG oslo_vmware.api [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52077209-87ab-76c8-b6f8-2a769df01a47, 'name': SearchDatastore_Task, 'duration_secs': 0.009318} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2484.833090] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2484.834328] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 84773cfa-edee-44bc-b89d-490d1fef5417/84773cfa-edee-44bc-b89d-490d1fef5417.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2484.834328] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0e6e95ec-9a47-49dc-bd46-56b932eceb5d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2484.842036] env[62684]: DEBUG oslo_vmware.api [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2484.842036] env[62684]: value = "task-2053924" [ 2484.842036] env[62684]: _type = "Task" [ 2484.842036] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2484.851260] env[62684]: DEBUG oslo_vmware.api [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053924, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2485.314012] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c4bf3105-6e3f-48a2-9bde-e2d4f53d3342 tempest-ServerActionsTestJSON-275262807 tempest-ServerActionsTestJSON-275262807-project-member] Lock "4081f322-a854-475a-9a66-3d573128f39d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.551s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2485.352266] env[62684]: DEBUG oslo_vmware.api [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053924, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.442577} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2485.352652] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore2] 84773cfa-edee-44bc-b89d-490d1fef5417/84773cfa-edee-44bc-b89d-490d1fef5417.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2485.352818] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2485.353092] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2799dcaa-f763-49ce-9ec4-0ffb31cb9816 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2485.360557] env[62684]: DEBUG oslo_vmware.api [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2485.360557] env[62684]: value = "task-2053925" [ 2485.360557] env[62684]: _type = "Task" [ 2485.360557] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2485.368210] env[62684]: DEBUG oslo_vmware.api [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053925, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2485.870426] env[62684]: DEBUG oslo_vmware.api [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053925, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060229} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2485.870782] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2485.871631] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d4543a6-e5ec-4c2e-aaa8-ae651b61f3f5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2485.893855] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Reconfiguring VM instance instance-00000079 to attach disk [datastore2] 84773cfa-edee-44bc-b89d-490d1fef5417/84773cfa-edee-44bc-b89d-490d1fef5417.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2485.894371] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fecb0d96-4039-475f-ab90-5f4dfca73a1e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2485.913913] env[62684]: DEBUG oslo_vmware.api [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2485.913913] env[62684]: value = "task-2053926" [ 2485.913913] env[62684]: _type = "Task" [ 2485.913913] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2485.923237] env[62684]: DEBUG oslo_vmware.api [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053926, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2486.425257] env[62684]: DEBUG oslo_vmware.api [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053926, 'name': ReconfigVM_Task, 'duration_secs': 0.254947} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2486.425567] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Reconfigured VM instance instance-00000079 to attach disk [datastore2] 84773cfa-edee-44bc-b89d-490d1fef5417/84773cfa-edee-44bc-b89d-490d1fef5417.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2486.426231] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d525559c-7d2e-41b8-80f2-f32937d21fdd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2486.432940] env[62684]: DEBUG oslo_vmware.api [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2486.432940] env[62684]: value = "task-2053927" [ 2486.432940] env[62684]: _type = "Task" [ 2486.432940] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2486.441173] env[62684]: DEBUG oslo_vmware.api [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053927, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2486.944359] env[62684]: DEBUG oslo_vmware.api [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053927, 'name': Rename_Task, 'duration_secs': 0.131486} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2486.944359] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2486.944359] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e8880678-e279-44c9-9272-0884a6071660 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2486.949454] env[62684]: DEBUG oslo_vmware.api [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2486.949454] env[62684]: value = "task-2053928" [ 2486.949454] env[62684]: _type = "Task" [ 2486.949454] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2486.957207] env[62684]: DEBUG oslo_vmware.api [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053928, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2487.469949] env[62684]: DEBUG oslo_vmware.api [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053928, 'name': PowerOnVM_Task, 'duration_secs': 0.471656} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2487.469949] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2487.469949] env[62684]: INFO nova.compute.manager [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Took 13.85 seconds to spawn the instance on the hypervisor. [ 2487.469949] env[62684]: DEBUG nova.compute.manager [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2487.470857] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e6dadbf-9364-4e8a-b311-a98178ef0523 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2488.000722] env[62684]: INFO nova.compute.manager [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Took 18.54 seconds to build instance. [ 2488.505575] env[62684]: DEBUG oslo_concurrency.lockutils [None req-71320021-8708-4cd9-837a-3b0bb414c901 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "84773cfa-edee-44bc-b89d-490d1fef5417" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.053s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2488.865884] env[62684]: DEBUG nova.compute.manager [req-23c93acb-92a0-4e96-963b-f711ce328eec req-52e8983d-e920-417d-b0c7-cf1335cee187 service nova] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Received event network-changed-23ff2d84-2b97-42ec-af87-1ac96cada09b {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2488.866350] env[62684]: DEBUG nova.compute.manager [req-23c93acb-92a0-4e96-963b-f711ce328eec req-52e8983d-e920-417d-b0c7-cf1335cee187 service nova] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Refreshing instance network info cache due to event network-changed-23ff2d84-2b97-42ec-af87-1ac96cada09b. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2488.866913] env[62684]: DEBUG oslo_concurrency.lockutils [req-23c93acb-92a0-4e96-963b-f711ce328eec req-52e8983d-e920-417d-b0c7-cf1335cee187 service nova] Acquiring lock "refresh_cache-84773cfa-edee-44bc-b89d-490d1fef5417" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2488.867069] env[62684]: DEBUG oslo_concurrency.lockutils [req-23c93acb-92a0-4e96-963b-f711ce328eec req-52e8983d-e920-417d-b0c7-cf1335cee187 service nova] Acquired lock "refresh_cache-84773cfa-edee-44bc-b89d-490d1fef5417" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2488.869023] env[62684]: DEBUG nova.network.neutron [req-23c93acb-92a0-4e96-963b-f711ce328eec req-52e8983d-e920-417d-b0c7-cf1335cee187 service nova] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Refreshing network info cache for port 23ff2d84-2b97-42ec-af87-1ac96cada09b {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2489.748448] env[62684]: DEBUG nova.network.neutron [req-23c93acb-92a0-4e96-963b-f711ce328eec req-52e8983d-e920-417d-b0c7-cf1335cee187 service nova] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Updated VIF entry in instance network info cache for port 23ff2d84-2b97-42ec-af87-1ac96cada09b. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2489.748448] env[62684]: DEBUG nova.network.neutron [req-23c93acb-92a0-4e96-963b-f711ce328eec req-52e8983d-e920-417d-b0c7-cf1335cee187 service nova] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Updating instance_info_cache with network_info: [{"id": "23ff2d84-2b97-42ec-af87-1ac96cada09b", "address": "fa:16:3e:29:a8:1f", "network": {"id": "8136a664-f757-43b3-a2fa-bacdf2e9566c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1799567463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947e7359aaba456fa1763f4dc8e9d359", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cc30a16-f070-421c-964e-50c9aa32f17a", "external-id": "nsx-vlan-transportzone-424", "segmentation_id": 424, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23ff2d84-2b", "ovs_interfaceid": "23ff2d84-2b97-42ec-af87-1ac96cada09b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2490.251342] env[62684]: DEBUG oslo_concurrency.lockutils [req-23c93acb-92a0-4e96-963b-f711ce328eec req-52e8983d-e920-417d-b0c7-cf1335cee187 service nova] Releasing lock "refresh_cache-84773cfa-edee-44bc-b89d-490d1fef5417" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2492.072439] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd4645c9-d761-4759-9178-e17e03afeefb 
tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2492.072889] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2492.579592] env[62684]: DEBUG nova.compute.manager [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2493.101213] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2493.101519] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2493.103107] env[62684]: INFO nova.compute.claims [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2494.147855] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b575d6fc-793c-45d7-83da-89befcc19ba6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2494.155542] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ca24237-183d-4dc7-ab3e-25521eb30778 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2494.184594] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01b33a9d-3976-4816-929a-8408da1ccde4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2494.192054] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a78d88-f6af-4051-a798-47861955212d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2494.205420] env[62684]: DEBUG nova.compute.provider_tree [None 
req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2494.708774] env[62684]: DEBUG nova.scheduler.client.report [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2495.214247] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.113s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2495.214810] env[62684]: DEBUG nova.compute.manager [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2495.720235] env[62684]: DEBUG nova.compute.utils [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2495.721583] env[62684]: DEBUG nova.compute.manager [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2495.721840] env[62684]: DEBUG nova.network.neutron [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2495.759339] env[62684]: DEBUG nova.policy [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '22544927e67845a69c8ac324918f2e93', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21534190adb0460e9a74363ae059a59d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2496.006355] env[62684]: DEBUG nova.network.neutron [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Successfully created port: a81e2573-af12-4cc6-93fb-463eedb4ed4b {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2496.225827] env[62684]: DEBUG nova.compute.manager [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2497.237256] env[62684]: DEBUG nova.compute.manager [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2497.263533] env[62684]: DEBUG nova.virt.hardware [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2497.263779] env[62684]: DEBUG nova.virt.hardware [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2497.263941] env[62684]: DEBUG nova.virt.hardware [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2497.264150] env[62684]: DEBUG nova.virt.hardware [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2497.264302] env[62684]: DEBUG nova.virt.hardware [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2497.264451] env[62684]: DEBUG nova.virt.hardware [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2497.264660] env[62684]: DEBUG nova.virt.hardware [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2497.264846] env[62684]: DEBUG nova.virt.hardware [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2497.265041] env[62684]: DEBUG 
nova.virt.hardware [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2497.265217] env[62684]: DEBUG nova.virt.hardware [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2497.265395] env[62684]: DEBUG nova.virt.hardware [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2497.266273] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7420aaf4-0002-4227-a6aa-cf77f0e3ca08 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2497.274229] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4539c572-c99e-4dd4-be5c-9bb47a623852 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2500.301025] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2500.804297] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2500.804560] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2500.804689] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2500.804866] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2500.805816] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-835a6b36-5fc9-4dc6-b5bc-8b59be301819 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2500.814134] env[62684]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3f868f3-3955-410b-a4a4-a11f52e6c06b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2500.827428] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9151e23f-7b3e-4b4b-8d02-34bd679e63bc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2500.833335] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc19101-8c59-4944-9155-586fdbfda096 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2500.862191] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180380MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2500.862323] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2500.862524] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2501.889029] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 84773cfa-edee-44bc-b89d-490d1fef5417 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2501.889315] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2501.889376] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2501.889497] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2501.924649] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77f56e8d-16f4-44eb-b71a-93044fe19a84 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2501.932208] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c99ca8b5-9be4-48b5-96c8-a3c74b8a421a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2501.961780] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc56715e-4562-49f2-aa10-a5ba60395516 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2501.969183] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-113d5960-fcb8-420c-8043-61322e221b54 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2501.983098] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2502.485680] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2502.872466] env[62684]: DEBUG nova.compute.manager [req-441039b8-0df2-49f8-97e3-2492c6c372e9 req-ea2b80fa-b62c-4743-89fa-acfa18e75c79 service nova] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Received event network-vif-plugged-a81e2573-af12-4cc6-93fb-463eedb4ed4b {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2502.872746] env[62684]: DEBUG oslo_concurrency.lockutils [req-441039b8-0df2-49f8-97e3-2492c6c372e9 req-ea2b80fa-b62c-4743-89fa-acfa18e75c79 service nova] Acquiring lock "0c7a3cd8-b42a-4111-bcfd-8fcd15b51028-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2502.873333] env[62684]: DEBUG oslo_concurrency.lockutils [req-441039b8-0df2-49f8-97e3-2492c6c372e9 req-ea2b80fa-b62c-4743-89fa-acfa18e75c79 service nova] Lock "0c7a3cd8-b42a-4111-bcfd-8fcd15b51028-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2502.873548] env[62684]: DEBUG oslo_concurrency.lockutils [req-441039b8-0df2-49f8-97e3-2492c6c372e9 req-ea2b80fa-b62c-4743-89fa-acfa18e75c79 service nova] Lock "0c7a3cd8-b42a-4111-bcfd-8fcd15b51028-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2502.873787] env[62684]: DEBUG nova.compute.manager [req-441039b8-0df2-49f8-97e3-2492c6c372e9 req-ea2b80fa-b62c-4743-89fa-acfa18e75c79 service nova] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] No waiting events found dispatching network-vif-plugged-a81e2573-af12-4cc6-93fb-463eedb4ed4b {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2502.873932] env[62684]: WARNING nova.compute.manager [req-441039b8-0df2-49f8-97e3-2492c6c372e9 req-ea2b80fa-b62c-4743-89fa-acfa18e75c79 service nova] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Received unexpected event network-vif-plugged-a81e2573-af12-4cc6-93fb-463eedb4ed4b for instance with vm_state building and task_state spawning. [ 2502.991450] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2502.991850] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.129s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2503.121191] env[62684]: DEBUG nova.network.neutron [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Successfully updated port: a81e2573-af12-4cc6-93fb-463eedb4ed4b {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2503.624028] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "refresh_cache-0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2503.624028] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquired lock "refresh_cache-0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2503.624219] env[62684]: DEBUG nova.network.neutron [None req-bd4645c9-d761-4759-9178-e17e03afeefb 
tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2504.154677] env[62684]: DEBUG nova.network.neutron [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Instance cache missing network info. {{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2504.273211] env[62684]: DEBUG nova.network.neutron [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Updating instance_info_cache with network_info: [{"id": "a81e2573-af12-4cc6-93fb-463eedb4ed4b", "address": "fa:16:3e:6b:cf:c0", "network": {"id": "93f58a85-0f77-4adc-88c5-bee1aa383535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1446072689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21534190adb0460e9a74363ae059a59d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa81e2573-af", "ovs_interfaceid": "a81e2573-af12-4cc6-93fb-463eedb4ed4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2504.775753] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Releasing lock "refresh_cache-0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2504.776102] env[62684]: DEBUG nova.compute.manager [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Instance network_info: |[{"id": "a81e2573-af12-4cc6-93fb-463eedb4ed4b", "address": "fa:16:3e:6b:cf:c0", "network": {"id": "93f58a85-0f77-4adc-88c5-bee1aa383535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1446072689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21534190adb0460e9a74363ae059a59d", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa81e2573-af", "ovs_interfaceid": "a81e2573-af12-4cc6-93fb-463eedb4ed4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2504.776564] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6b:cf:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7874ee7f-20c7-4bd8-a750-ed489e9acc65', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a81e2573-af12-4cc6-93fb-463eedb4ed4b', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2504.783844] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Creating folder: Project (21534190adb0460e9a74363ae059a59d). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2504.784131] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fec15b82-2c79-4e77-bbe2-f19fbae2eb1a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2504.796621] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Created folder: Project (21534190adb0460e9a74363ae059a59d) in parent group-v421118. [ 2504.796850] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Creating folder: Instances. Parent ref: group-v421431. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2504.797098] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-38d9b833-f232-468c-ac8a-31bab42d5b61 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2504.806244] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Created folder: Instances in parent group-v421431. [ 2504.806465] env[62684]: DEBUG oslo.service.loopingcall [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2504.806643] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2504.806832] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-343c8586-eee0-4431-b3ec-e1ad9e424ba2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2504.824676] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2504.824676] env[62684]: value = "task-2053931" [ 2504.824676] env[62684]: _type = "Task" [ 2504.824676] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2504.831520] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053931, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2504.991907] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2504.992111] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2505.055311] env[62684]: DEBUG nova.compute.manager [req-1bbe9e06-2a09-4e8a-ae2f-7ad2ad09f6bf req-d5387dac-401a-4ed5-a20a-f448b508be19 service nova] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Received event network-changed-a81e2573-af12-4cc6-93fb-463eedb4ed4b {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2505.055586] env[62684]: DEBUG nova.compute.manager [req-1bbe9e06-2a09-4e8a-ae2f-7ad2ad09f6bf req-d5387dac-401a-4ed5-a20a-f448b508be19 service nova] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Refreshing instance network info cache due to event network-changed-a81e2573-af12-4cc6-93fb-463eedb4ed4b. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2505.055876] env[62684]: DEBUG oslo_concurrency.lockutils [req-1bbe9e06-2a09-4e8a-ae2f-7ad2ad09f6bf req-d5387dac-401a-4ed5-a20a-f448b508be19 service nova] Acquiring lock "refresh_cache-0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2505.056292] env[62684]: DEBUG oslo_concurrency.lockutils [req-1bbe9e06-2a09-4e8a-ae2f-7ad2ad09f6bf req-d5387dac-401a-4ed5-a20a-f448b508be19 service nova] Acquired lock "refresh_cache-0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2505.056520] env[62684]: DEBUG nova.network.neutron [req-1bbe9e06-2a09-4e8a-ae2f-7ad2ad09f6bf req-d5387dac-401a-4ed5-a20a-f448b508be19 service nova] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Refreshing network info cache for port a81e2573-af12-4cc6-93fb-463eedb4ed4b {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2505.334308] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053931, 'name': CreateVM_Task, 'duration_secs': 0.320031} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2505.334656] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2505.335149] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2505.335326] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2505.335655] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2505.335910] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5e94c0a-163a-4f72-93d9-a8cb7e526727 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.340304] env[62684]: DEBUG oslo_vmware.api [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2505.340304] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520f6977-e633-d74f-23aa-7a5cf3494687" [ 2505.340304] env[62684]: _type = "Task" [ 2505.340304] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2505.349434] env[62684]: DEBUG oslo_vmware.api [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520f6977-e633-d74f-23aa-7a5cf3494687, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2505.745975] env[62684]: DEBUG nova.network.neutron [req-1bbe9e06-2a09-4e8a-ae2f-7ad2ad09f6bf req-d5387dac-401a-4ed5-a20a-f448b508be19 service nova] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Updated VIF entry in instance network info cache for port a81e2573-af12-4cc6-93fb-463eedb4ed4b. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2505.746368] env[62684]: DEBUG nova.network.neutron [req-1bbe9e06-2a09-4e8a-ae2f-7ad2ad09f6bf req-d5387dac-401a-4ed5-a20a-f448b508be19 service nova] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Updating instance_info_cache with network_info: [{"id": "a81e2573-af12-4cc6-93fb-463eedb4ed4b", "address": "fa:16:3e:6b:cf:c0", "network": {"id": "93f58a85-0f77-4adc-88c5-bee1aa383535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1446072689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21534190adb0460e9a74363ae059a59d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa81e2573-af", "ovs_interfaceid": "a81e2573-af12-4cc6-93fb-463eedb4ed4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2505.850681] env[62684]: DEBUG oslo_vmware.api [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520f6977-e633-d74f-23aa-7a5cf3494687, 'name': SearchDatastore_Task, 'duration_secs': 0.012787} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2505.850945] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2505.851194] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2505.851431] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2505.851583] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2505.851764] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2505.852029] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-99f73df3-259a-4bcd-9373-2d2d0b2df4c6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.859573] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2505.859749] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2505.860415] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdea87c1-57d8-4df4-9b68-89e5c2eb321c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.865219] env[62684]: DEBUG oslo_vmware.api [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2505.865219] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52772c90-0208-2d4a-ffb6-f13cc59e11af" [ 2505.865219] env[62684]: _type = "Task" [ 2505.865219] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2505.871879] env[62684]: DEBUG oslo_vmware.api [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52772c90-0208-2d4a-ffb6-f13cc59e11af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2506.248920] env[62684]: DEBUG oslo_concurrency.lockutils [req-1bbe9e06-2a09-4e8a-ae2f-7ad2ad09f6bf req-d5387dac-401a-4ed5-a20a-f448b508be19 service nova] Releasing lock "refresh_cache-0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2506.375570] env[62684]: DEBUG oslo_vmware.api [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52772c90-0208-2d4a-ffb6-f13cc59e11af, 'name': SearchDatastore_Task, 'duration_secs': 0.009074} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2506.376346] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6c4cf90-441e-4bf5-9344-44c0da694f91 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2506.381690] env[62684]: DEBUG oslo_vmware.api [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2506.381690] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f97c3c-9f61-ff31-7fc8-a22c806a2761" [ 2506.381690] env[62684]: _type = "Task" [ 2506.381690] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2506.388838] env[62684]: DEBUG oslo_vmware.api [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f97c3c-9f61-ff31-7fc8-a22c806a2761, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2506.891968] env[62684]: DEBUG oslo_vmware.api [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52f97c3c-9f61-ff31-7fc8-a22c806a2761, 'name': SearchDatastore_Task, 'duration_secs': 0.009406} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2506.892270] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2506.892521] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028/0c7a3cd8-b42a-4111-bcfd-8fcd15b51028.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2506.892802] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f19be5f1-856d-413c-a0a9-04c40ba87962 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2506.899113] env[62684]: DEBUG oslo_vmware.api [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2506.899113] env[62684]: value = "task-2053932" [ 2506.899113] env[62684]: _type = "Task" [ 2506.899113] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2506.906708] env[62684]: DEBUG oslo_vmware.api [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053932, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2507.003124] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Didn't find any instances for network info cache update. 
{{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 2507.003405] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2507.003974] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2507.003974] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2507.300907] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2507.409440] env[62684]: DEBUG oslo_vmware.api [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053932, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.426274} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2507.409879] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028/0c7a3cd8-b42a-4111-bcfd-8fcd15b51028.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2507.409879] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2507.410157] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-78702101-4867-4556-ae8d-51abd4c08576 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2507.416703] env[62684]: DEBUG oslo_vmware.api [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2507.416703] env[62684]: value = "task-2053933" [ 2507.416703] env[62684]: _type = "Task" [ 2507.416703] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2507.424212] env[62684]: DEBUG oslo_vmware.api [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053933, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2507.926793] env[62684]: DEBUG oslo_vmware.api [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053933, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060354} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2507.927082] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2507.927859] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b713846-75f8-470c-953d-5822aa9a1378 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2507.949895] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028/0c7a3cd8-b42a-4111-bcfd-8fcd15b51028.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2507.950139] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c620fa00-2075-4857-be91-8f97755b6513 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2507.968710] env[62684]: DEBUG oslo_vmware.api [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2507.968710] env[62684]: value = "task-2053934" [ 2507.968710] env[62684]: _type = "Task" [ 2507.968710] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2507.975954] env[62684]: DEBUG oslo_vmware.api [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053934, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2508.295998] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2508.300657] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2508.300842] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2508.478164] env[62684]: DEBUG oslo_vmware.api [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053934, 'name': ReconfigVM_Task, 'duration_secs': 0.272568} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2508.478529] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Reconfigured VM instance instance-0000007a to attach disk [datastore1] 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028/0c7a3cd8-b42a-4111-bcfd-8fcd15b51028.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2508.479051] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cf478924-3a1c-43ba-b7f0-bc4add77404d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2508.485179] env[62684]: DEBUG oslo_vmware.api [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2508.485179] env[62684]: value = "task-2053935" [ 2508.485179] env[62684]: _type = "Task" [ 2508.485179] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2508.492530] env[62684]: DEBUG oslo_vmware.api [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053935, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2508.995495] env[62684]: DEBUG oslo_vmware.api [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053935, 'name': Rename_Task, 'duration_secs': 0.143194} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2508.995762] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2508.996012] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-72bb4d27-1f93-44ad-9a58-761c17d2e5a0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.001671] env[62684]: DEBUG oslo_vmware.api [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2509.001671] env[62684]: value = "task-2053936" [ 2509.001671] env[62684]: _type = "Task" [ 2509.001671] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2509.008747] env[62684]: DEBUG oslo_vmware.api [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053936, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2509.512149] env[62684]: DEBUG oslo_vmware.api [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053936, 'name': PowerOnVM_Task, 'duration_secs': 0.403597} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2509.512512] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2509.513051] env[62684]: INFO nova.compute.manager [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Took 12.28 seconds to spawn the instance on the hypervisor. [ 2509.513051] env[62684]: DEBUG nova.compute.manager [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2509.513631] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdf0fc52-3907-483c-8d89-ab0aefb608ed {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2510.031336] env[62684]: INFO nova.compute.manager [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Took 16.95 seconds to build instance. 
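Editor's note: the spawn sequence recorded above (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) follows the same wait_for_task/_poll_task pattern each time: a vCenter task is submitted, then its state is polled until it completes, which is what produces the repeated "Task: {'id': task-..., ...} progress is N%." lines followed by "completed successfully" (with oslo_service.loopingcall visible in the log as the periodic scheduler). The sketch below is only a minimal, self-contained illustration of that polling loop under assumed names; it is not the oslo.vmware implementation, and fetch_task_info() and poll_interval are hypothetical stand-ins.

    # Minimal sketch of a poll-until-done loop (NOT the actual oslo.vmware code).
    # fetch_task_info() is a hypothetical stub standing in for a vSphere task read.
    import time

    def fetch_task_info(task_ref, _state={"progress": 0}):
        # Hypothetical stub: pretend the task advances 50% per poll.
        _state["progress"] = min(_state["progress"] + 50, 100)
        return {
            "id": task_ref,
            "state": "success" if _state["progress"] >= 100 else "running",
            "progress": _state["progress"],
        }

    def wait_for_task(task_ref, poll_interval=0.5):
        """Poll a task reference until it reports success or raises on error."""
        while True:
            info = fetch_task_info(task_ref)
            print("Task: %s progress is %d%%." % (info["id"], info["progress"]))
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                raise RuntimeError("task %s failed" % info["id"])
            time.sleep(poll_interval)

    if __name__ == "__main__":
        wait_for_task("task-2053931")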
[ 2510.520196] env[62684]: DEBUG nova.compute.manager [req-de526435-0d33-4e2d-864d-20ba7c52f30c req-433bd559-17fe-4ae9-8e01-a380e4008428 service nova] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Received event network-changed-a81e2573-af12-4cc6-93fb-463eedb4ed4b {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2510.520196] env[62684]: DEBUG nova.compute.manager [req-de526435-0d33-4e2d-864d-20ba7c52f30c req-433bd559-17fe-4ae9-8e01-a380e4008428 service nova] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Refreshing instance network info cache due to event network-changed-a81e2573-af12-4cc6-93fb-463eedb4ed4b. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2510.520196] env[62684]: DEBUG oslo_concurrency.lockutils [req-de526435-0d33-4e2d-864d-20ba7c52f30c req-433bd559-17fe-4ae9-8e01-a380e4008428 service nova] Acquiring lock "refresh_cache-0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2510.520196] env[62684]: DEBUG oslo_concurrency.lockutils [req-de526435-0d33-4e2d-864d-20ba7c52f30c req-433bd559-17fe-4ae9-8e01-a380e4008428 service nova] Acquired lock "refresh_cache-0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2510.520196] env[62684]: DEBUG nova.network.neutron [req-de526435-0d33-4e2d-864d-20ba7c52f30c req-433bd559-17fe-4ae9-8e01-a380e4008428 service nova] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Refreshing network info cache for port a81e2573-af12-4cc6-93fb-463eedb4ed4b {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2510.533517] env[62684]: DEBUG oslo_concurrency.lockutils [None req-bd4645c9-d761-4759-9178-e17e03afeefb tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.461s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2511.233325] env[62684]: DEBUG nova.network.neutron [req-de526435-0d33-4e2d-864d-20ba7c52f30c req-433bd559-17fe-4ae9-8e01-a380e4008428 service nova] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Updated VIF entry in instance network info cache for port a81e2573-af12-4cc6-93fb-463eedb4ed4b. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2511.233705] env[62684]: DEBUG nova.network.neutron [req-de526435-0d33-4e2d-864d-20ba7c52f30c req-433bd559-17fe-4ae9-8e01-a380e4008428 service nova] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Updating instance_info_cache with network_info: [{"id": "a81e2573-af12-4cc6-93fb-463eedb4ed4b", "address": "fa:16:3e:6b:cf:c0", "network": {"id": "93f58a85-0f77-4adc-88c5-bee1aa383535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1446072689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21534190adb0460e9a74363ae059a59d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa81e2573-af", "ovs_interfaceid": "a81e2573-af12-4cc6-93fb-463eedb4ed4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2511.296135] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2511.736531] env[62684]: DEBUG oslo_concurrency.lockutils [req-de526435-0d33-4e2d-864d-20ba7c52f30c req-433bd559-17fe-4ae9-8e01-a380e4008428 service nova] Releasing lock "refresh_cache-0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2511.800085] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2526.908649] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b909c4b8-1e43-4733-9ae8-c1aeea5137d7 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "84773cfa-edee-44bc-b89d-490d1fef5417" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2526.908933] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b909c4b8-1e43-4733-9ae8-c1aeea5137d7 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "84773cfa-edee-44bc-b89d-490d1fef5417" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2527.412022] env[62684]: 
DEBUG nova.compute.utils [None req-b909c4b8-1e43-4733-9ae8-c1aeea5137d7 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2527.914941] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b909c4b8-1e43-4733-9ae8-c1aeea5137d7 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "84773cfa-edee-44bc-b89d-490d1fef5417" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2528.971657] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b909c4b8-1e43-4733-9ae8-c1aeea5137d7 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "84773cfa-edee-44bc-b89d-490d1fef5417" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2528.972126] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b909c4b8-1e43-4733-9ae8-c1aeea5137d7 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "84773cfa-edee-44bc-b89d-490d1fef5417" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2528.972199] env[62684]: INFO nova.compute.manager [None req-b909c4b8-1e43-4733-9ae8-c1aeea5137d7 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Attaching volume 1caca476-a74d-4f57-9988-f208d69abc24 to /dev/sdb [ 2529.001419] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-465cfcb6-4488-4a74-a604-db62899af7fa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.008773] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d96eae69-a1c4-4e51-bfcb-c737ba1989dc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.022754] env[62684]: DEBUG nova.virt.block_device [None req-b909c4b8-1e43-4733-9ae8-c1aeea5137d7 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Updating existing volume attachment record: 5e0dd15a-9b94-4573-9bf7-28dc900755bd {{(pid=62684) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2533.564778] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-b909c4b8-1e43-4733-9ae8-c1aeea5137d7 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Volume attach. 
Driver type: vmdk {{(pid=62684) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2533.565061] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-b909c4b8-1e43-4733-9ae8-c1aeea5137d7 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421434', 'volume_id': '1caca476-a74d-4f57-9988-f208d69abc24', 'name': 'volume-1caca476-a74d-4f57-9988-f208d69abc24', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '84773cfa-edee-44bc-b89d-490d1fef5417', 'attached_at': '', 'detached_at': '', 'volume_id': '1caca476-a74d-4f57-9988-f208d69abc24', 'serial': '1caca476-a74d-4f57-9988-f208d69abc24'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2533.565946] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd01446a-8118-4a8d-8f01-ad20f7424713 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2533.581888] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2bc034e-346b-492a-8341-e07313c8906d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2533.605727] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-b909c4b8-1e43-4733-9ae8-c1aeea5137d7 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] volume-1caca476-a74d-4f57-9988-f208d69abc24/volume-1caca476-a74d-4f57-9988-f208d69abc24.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2533.605978] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-93349c28-d4de-4f2d-9541-0a3e500d0be6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2533.624063] env[62684]: DEBUG oslo_vmware.api [None req-b909c4b8-1e43-4733-9ae8-c1aeea5137d7 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2533.624063] env[62684]: value = "task-2053939" [ 2533.624063] env[62684]: _type = "Task" [ 2533.624063] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2533.631620] env[62684]: DEBUG oslo_vmware.api [None req-b909c4b8-1e43-4733-9ae8-c1aeea5137d7 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053939, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2534.134096] env[62684]: DEBUG oslo_vmware.api [None req-b909c4b8-1e43-4733-9ae8-c1aeea5137d7 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053939, 'name': ReconfigVM_Task, 'duration_secs': 0.327259} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2534.134370] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-b909c4b8-1e43-4733-9ae8-c1aeea5137d7 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Reconfigured VM instance instance-00000079 to attach disk [datastore1] volume-1caca476-a74d-4f57-9988-f208d69abc24/volume-1caca476-a74d-4f57-9988-f208d69abc24.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2534.138874] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-729f81f0-b273-495a-9ec8-fb99c9f8d454 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2534.153411] env[62684]: DEBUG oslo_vmware.api [None req-b909c4b8-1e43-4733-9ae8-c1aeea5137d7 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2534.153411] env[62684]: value = "task-2053940" [ 2534.153411] env[62684]: _type = "Task" [ 2534.153411] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2534.161990] env[62684]: DEBUG oslo_vmware.api [None req-b909c4b8-1e43-4733-9ae8-c1aeea5137d7 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053940, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2534.663160] env[62684]: DEBUG oslo_vmware.api [None req-b909c4b8-1e43-4733-9ae8-c1aeea5137d7 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053940, 'name': ReconfigVM_Task, 'duration_secs': 0.157454} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2534.663918] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-b909c4b8-1e43-4733-9ae8-c1aeea5137d7 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421434', 'volume_id': '1caca476-a74d-4f57-9988-f208d69abc24', 'name': 'volume-1caca476-a74d-4f57-9988-f208d69abc24', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '84773cfa-edee-44bc-b89d-490d1fef5417', 'attached_at': '', 'detached_at': '', 'volume_id': '1caca476-a74d-4f57-9988-f208d69abc24', 'serial': '1caca476-a74d-4f57-9988-f208d69abc24'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2535.702707] env[62684]: DEBUG nova.objects.instance [None req-b909c4b8-1e43-4733-9ae8-c1aeea5137d7 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lazy-loading 'flavor' on Instance uuid 84773cfa-edee-44bc-b89d-490d1fef5417 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2536.207869] env[62684]: DEBUG oslo_concurrency.lockutils [None req-b909c4b8-1e43-4733-9ae8-c1aeea5137d7 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "84773cfa-edee-44bc-b89d-490d1fef5417" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.236s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2536.378086] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d4365a78-c223-4b59-be99-663ff1963fd6 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "84773cfa-edee-44bc-b89d-490d1fef5417" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2536.378372] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d4365a78-c223-4b59-be99-663ff1963fd6 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "84773cfa-edee-44bc-b89d-490d1fef5417" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2536.881251] env[62684]: INFO nova.compute.manager [None req-d4365a78-c223-4b59-be99-663ff1963fd6 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Detaching volume 1caca476-a74d-4f57-9988-f208d69abc24 [ 2536.911340] env[62684]: INFO nova.virt.block_device [None req-d4365a78-c223-4b59-be99-663ff1963fd6 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Attempting to driver detach volume 1caca476-a74d-4f57-9988-f208d69abc24 from mountpoint /dev/sdb [ 2536.911588] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4365a78-c223-4b59-be99-663ff1963fd6 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] 
[instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Volume detach. Driver type: vmdk {{(pid=62684) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2536.911779] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4365a78-c223-4b59-be99-663ff1963fd6 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421434', 'volume_id': '1caca476-a74d-4f57-9988-f208d69abc24', 'name': 'volume-1caca476-a74d-4f57-9988-f208d69abc24', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '84773cfa-edee-44bc-b89d-490d1fef5417', 'attached_at': '', 'detached_at': '', 'volume_id': '1caca476-a74d-4f57-9988-f208d69abc24', 'serial': '1caca476-a74d-4f57-9988-f208d69abc24'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2536.912651] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95f91542-fdd3-4d3a-82d4-df11ac08b76e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2536.935015] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97031d5a-ae8a-4980-8c0c-37662b2e5ceb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2536.941808] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12dd52a9-6a1d-4e44-9110-becf7cf12dcd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2536.962688] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4906fe4b-2e28-47ee-beaf-7528721b16de {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2536.976392] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4365a78-c223-4b59-be99-663ff1963fd6 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] The volume has not been displaced from its original location: [datastore1] volume-1caca476-a74d-4f57-9988-f208d69abc24/volume-1caca476-a74d-4f57-9988-f208d69abc24.vmdk. No consolidation needed. 
{{(pid=62684) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2536.981417] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4365a78-c223-4b59-be99-663ff1963fd6 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Reconfiguring VM instance instance-00000079 to detach disk 2001 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2536.981670] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d94167b-8f57-4e01-a93e-02781f50d7b7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2536.998282] env[62684]: DEBUG oslo_vmware.api [None req-d4365a78-c223-4b59-be99-663ff1963fd6 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2536.998282] env[62684]: value = "task-2053941" [ 2536.998282] env[62684]: _type = "Task" [ 2536.998282] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2537.005690] env[62684]: DEBUG oslo_vmware.api [None req-d4365a78-c223-4b59-be99-663ff1963fd6 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053941, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2537.507310] env[62684]: DEBUG oslo_vmware.api [None req-d4365a78-c223-4b59-be99-663ff1963fd6 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053941, 'name': ReconfigVM_Task, 'duration_secs': 0.213288} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2537.507583] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4365a78-c223-4b59-be99-663ff1963fd6 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Reconfigured VM instance instance-00000079 to detach disk 2001 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2537.512127] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d324d40-6e62-4552-83c1-cca0c9ebb0c8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2537.527133] env[62684]: DEBUG oslo_vmware.api [None req-d4365a78-c223-4b59-be99-663ff1963fd6 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2537.527133] env[62684]: value = "task-2053942" [ 2537.527133] env[62684]: _type = "Task" [ 2537.527133] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2537.534718] env[62684]: DEBUG oslo_vmware.api [None req-d4365a78-c223-4b59-be99-663ff1963fd6 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053942, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2538.036374] env[62684]: DEBUG oslo_vmware.api [None req-d4365a78-c223-4b59-be99-663ff1963fd6 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053942, 'name': ReconfigVM_Task, 'duration_secs': 0.129924} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2538.036682] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4365a78-c223-4b59-be99-663ff1963fd6 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421434', 'volume_id': '1caca476-a74d-4f57-9988-f208d69abc24', 'name': 'volume-1caca476-a74d-4f57-9988-f208d69abc24', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '84773cfa-edee-44bc-b89d-490d1fef5417', 'attached_at': '', 'detached_at': '', 'volume_id': '1caca476-a74d-4f57-9988-f208d69abc24', 'serial': '1caca476-a74d-4f57-9988-f208d69abc24'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2538.577219] env[62684]: DEBUG nova.objects.instance [None req-d4365a78-c223-4b59-be99-663ff1963fd6 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lazy-loading 'flavor' on Instance uuid 84773cfa-edee-44bc-b89d-490d1fef5417 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2539.584212] env[62684]: DEBUG oslo_concurrency.lockutils [None req-d4365a78-c223-4b59-be99-663ff1963fd6 tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "84773cfa-edee-44bc-b89d-490d1fef5417" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.206s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2540.615392] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "84773cfa-edee-44bc-b89d-490d1fef5417" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2540.615774] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "84773cfa-edee-44bc-b89d-490d1fef5417" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2540.615923] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "84773cfa-edee-44bc-b89d-490d1fef5417-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2540.616148] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "84773cfa-edee-44bc-b89d-490d1fef5417-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2540.616331] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "84773cfa-edee-44bc-b89d-490d1fef5417-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2540.618439] env[62684]: INFO nova.compute.manager [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Terminating instance [ 2540.620168] env[62684]: DEBUG nova.compute.manager [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2540.620366] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2540.621261] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbbd3e71-43dc-4f05-ac18-068edf16d858 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2540.629420] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2540.629645] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ba89b110-b566-483b-a6ce-9b531c7aec01 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2540.635757] env[62684]: DEBUG oslo_vmware.api [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2540.635757] env[62684]: value = "task-2053943" [ 2540.635757] env[62684]: _type = "Task" [ 2540.635757] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2540.645303] env[62684]: DEBUG oslo_vmware.api [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053943, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2541.145511] env[62684]: DEBUG oslo_vmware.api [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053943, 'name': PowerOffVM_Task, 'duration_secs': 0.193944} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2541.145702] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2541.145818] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2541.146046] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-960c8778-0110-47cc-8078-f5ef6ee4e448 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2543.051960] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2543.052390] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Deleting contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2543.052390] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Deleting the datastore file [datastore2] 84773cfa-edee-44bc-b89d-490d1fef5417 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2543.052714] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-038f3dad-2f6f-41f3-8aa0-ae010c461bfa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2543.059473] env[62684]: DEBUG oslo_vmware.api [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for the task: (returnval){ [ 2543.059473] env[62684]: value = "task-2053945" [ 2543.059473] env[62684]: _type = 
"Task" [ 2543.059473] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2543.066906] env[62684]: DEBUG oslo_vmware.api [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053945, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2543.569661] env[62684]: DEBUG oslo_vmware.api [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Task: {'id': task-2053945, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137033} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2543.569912] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2543.570114] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Deleted contents of the VM from datastore datastore2 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2543.570297] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2543.570475] env[62684]: INFO nova.compute.manager [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Took 2.95 seconds to destroy the instance on the hypervisor. [ 2543.570712] env[62684]: DEBUG oslo.service.loopingcall [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2543.570899] env[62684]: DEBUG nova.compute.manager [-] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2543.570995] env[62684]: DEBUG nova.network.neutron [-] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2544.023765] env[62684]: DEBUG nova.compute.manager [req-88797d28-ce38-4858-b4a6-aa4c3f59e816 req-e5a61dd0-a55b-4c4b-8cf4-f293054d322e service nova] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Received event network-vif-deleted-23ff2d84-2b97-42ec-af87-1ac96cada09b {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2544.023919] env[62684]: INFO nova.compute.manager [req-88797d28-ce38-4858-b4a6-aa4c3f59e816 req-e5a61dd0-a55b-4c4b-8cf4-f293054d322e service nova] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Neutron deleted interface 23ff2d84-2b97-42ec-af87-1ac96cada09b; detaching it from the instance and deleting it from the info cache [ 2544.024201] env[62684]: DEBUG nova.network.neutron [req-88797d28-ce38-4858-b4a6-aa4c3f59e816 req-e5a61dd0-a55b-4c4b-8cf4-f293054d322e service nova] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2544.496845] env[62684]: DEBUG nova.network.neutron [-] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2544.527583] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-875b398c-9a3e-4c39-967e-74af9a3de6b1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2544.538381] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cccf25e0-30e4-4702-b1ae-439cd9004854 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2544.563840] env[62684]: DEBUG nova.compute.manager [req-88797d28-ce38-4858-b4a6-aa4c3f59e816 req-e5a61dd0-a55b-4c4b-8cf4-f293054d322e service nova] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Detach interface failed, port_id=23ff2d84-2b97-42ec-af87-1ac96cada09b, reason: Instance 84773cfa-edee-44bc-b89d-490d1fef5417 could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2545.000532] env[62684]: INFO nova.compute.manager [-] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Took 1.43 seconds to deallocate network for instance. 
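
The entries above complete the volume detach and instance teardown for 84773cfa-edee-44bc-b89d-490d1fef5417: the VMDK is detached via ReconfigVM_Task, the VM is powered off and unregistered, its files are deleted from the datastore, and the network is deallocated. Every vCenter operation in this sequence is driven through the same wait_for_task polling loop that produces the repeated "_poll_task ... progress is N%" lines. The short Python sketch below is only an illustration of that polling pattern, not Nova or oslo.vmware code; TaskInfo, get_task_info and fake_task are hypothetical stand-ins introduced for this example.

# Minimal sketch of a vCenter-style task poll loop, mirroring the pattern
# visible in the log above (poll until the task reports success or error).
import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    # 'queued' | 'running' | 'success' | 'error'
    state: str
    # 0-100, as printed in the "progress is N%" log lines
    progress: int
    error: str | None = None

def wait_for_task(get_task_info, poll_interval=0.5, timeout=60.0):
    """Poll a task until it reaches a terminal state or the timeout expires."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError(f"task failed: {info.error}")
        # sleep between polls, like the periodic progress entries above
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")

if __name__ == "__main__":
    # Usage: simulate a task that completes on the third poll.
    progress = iter([0, 5, 100])

    def fake_task():
        p = next(progress)
        return TaskInfo(state="success" if p == 100 else "running", progress=p)

    print(wait_for_task(fake_task, poll_interval=0.0))
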
[ 2545.506786] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2545.507177] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2545.507264] env[62684]: DEBUG nova.objects.instance [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lazy-loading 'resources' on Instance uuid 84773cfa-edee-44bc-b89d-490d1fef5417 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2546.027103] env[62684]: DEBUG nova.scheduler.client.report [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2546.042034] env[62684]: DEBUG nova.scheduler.client.report [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2546.042034] env[62684]: DEBUG nova.compute.provider_tree [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2546.051325] env[62684]: DEBUG nova.scheduler.client.report [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2546.068828] env[62684]: DEBUG 
nova.scheduler.client.report [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2546.102371] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6cc64f6-3cec-4094-a215-adc929bf68d6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2546.110062] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a32ea3ba-c291-494a-a03c-bd8ddacf8823 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2546.139415] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2759a915-5ad8-44d2-9963-e40a93dd5bf3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2546.146306] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d79599e-cb5e-4541-97e7-7e953c71d6f2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2546.158818] env[62684]: DEBUG nova.compute.provider_tree [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2546.661736] env[62684]: DEBUG nova.scheduler.client.report [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2547.166539] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.659s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2547.187135] env[62684]: INFO nova.scheduler.client.report [None req-e2387f04-5e45-4d8a-821f-daa72244850f tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Deleted allocations for instance 84773cfa-edee-44bc-b89d-490d1fef5417 [ 2547.696644] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e2387f04-5e45-4d8a-821f-daa72244850f 
tempest-AttachVolumeNegativeTest-72266782 tempest-AttachVolumeNegativeTest-72266782-project-member] Lock "84773cfa-edee-44bc-b89d-490d1fef5417" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.081s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2547.933009] env[62684]: DEBUG nova.compute.manager [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2547.934055] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c56668a3-5d6c-4ceb-a06c-a6f9c0bd720b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2548.446297] env[62684]: INFO nova.compute.manager [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] instance snapshotting [ 2548.447335] env[62684]: DEBUG nova.objects.instance [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lazy-loading 'flavor' on Instance uuid 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2548.953666] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76242bb9-53ae-43f9-b49d-82ee7f5a9827 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2548.971843] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9150b5ed-542f-45b2-8e70-331740fb1662 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2549.482257] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Creating Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2549.482629] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-bc5219db-0ebe-4c0b-b21f-8d2355747e91 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2549.490571] env[62684]: DEBUG oslo_vmware.api [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2549.490571] env[62684]: value = "task-2053947" [ 2549.490571] env[62684]: _type = "Task" [ 2549.490571] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2549.498270] env[62684]: DEBUG oslo_vmware.api [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053947, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2550.000801] env[62684]: DEBUG oslo_vmware.api [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053947, 'name': CreateSnapshot_Task, 'duration_secs': 0.444242} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2550.001384] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Created Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2550.001863] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a32bb1-5a1c-4e40-ab5a-c415760981b5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2550.518831] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Creating linked-clone VM from snapshot {{(pid=62684) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2550.519125] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-20603b11-8446-4253-bf0a-d3d48a069a35 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2550.528546] env[62684]: DEBUG oslo_vmware.api [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2550.528546] env[62684]: value = "task-2053948" [ 2550.528546] env[62684]: _type = "Task" [ 2550.528546] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2550.536477] env[62684]: DEBUG oslo_vmware.api [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053948, 'name': CloneVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2551.038902] env[62684]: DEBUG oslo_vmware.api [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053948, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2551.539441] env[62684]: DEBUG oslo_vmware.api [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053948, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2552.041054] env[62684]: DEBUG oslo_vmware.api [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053948, 'name': CloneVM_Task, 'duration_secs': 1.053116} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2552.041431] env[62684]: INFO nova.virt.vmwareapi.vmops [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Created linked-clone VM from snapshot [ 2552.042045] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b073772-3a39-4da5-b9e1-b7f5aedd7a6f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2552.048900] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Uploading image 2225be2a-401c-48c9-90d9-576e4579acf9 {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2552.069851] env[62684]: DEBUG oslo_vmware.rw_handles [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2552.069851] env[62684]: value = "vm-421436" [ 2552.069851] env[62684]: _type = "VirtualMachine" [ 2552.069851] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2552.070110] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ad16fac9-7785-4ea4-b012-64c5524e1814 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2552.076247] env[62684]: DEBUG oslo_vmware.rw_handles [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lease: (returnval){ [ 2552.076247] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523bcce2-d644-f7f0-a611-6884dba3ad3d" [ 2552.076247] env[62684]: _type = "HttpNfcLease" [ 2552.076247] env[62684]: } obtained for exporting VM: (result){ [ 2552.076247] env[62684]: value = "vm-421436" [ 2552.076247] env[62684]: _type = "VirtualMachine" [ 2552.076247] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2552.076561] env[62684]: DEBUG oslo_vmware.api [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the lease: (returnval){ [ 2552.076561] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523bcce2-d644-f7f0-a611-6884dba3ad3d" [ 2552.076561] env[62684]: _type = "HttpNfcLease" [ 2552.076561] env[62684]: } to be ready. 
{{(pid=62684) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2552.082248] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2552.082248] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523bcce2-d644-f7f0-a611-6884dba3ad3d" [ 2552.082248] env[62684]: _type = "HttpNfcLease" [ 2552.082248] env[62684]: } is initializing. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2552.588022] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2552.588022] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523bcce2-d644-f7f0-a611-6884dba3ad3d" [ 2552.588022] env[62684]: _type = "HttpNfcLease" [ 2552.588022] env[62684]: } is ready. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2552.588022] env[62684]: DEBUG oslo_vmware.rw_handles [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2552.588022] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523bcce2-d644-f7f0-a611-6884dba3ad3d" [ 2552.588022] env[62684]: _type = "HttpNfcLease" [ 2552.588022] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2552.588022] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48798e9c-12c2-455e-af9a-e9bce9053a42 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2552.594392] env[62684]: DEBUG oslo_vmware.rw_handles [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5241dfab-7558-3362-3c50-1fd9e1c1c6d2/disk-0.vmdk from lease info. {{(pid=62684) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2552.594702] env[62684]: DEBUG oslo_vmware.rw_handles [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5241dfab-7558-3362-3c50-1fd9e1c1c6d2/disk-0.vmdk for reading. {{(pid=62684) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2552.679650] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-04bf1c0b-9cf4-41f4-84a9-b32dc1d28420 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2560.477488] env[62684]: DEBUG oslo_vmware.rw_handles [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5241dfab-7558-3362-3c50-1fd9e1c1c6d2/disk-0.vmdk. 
{{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2560.478469] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2df213ea-755f-4b78-b882-aab4cad2074a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2560.484555] env[62684]: DEBUG oslo_vmware.rw_handles [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5241dfab-7558-3362-3c50-1fd9e1c1c6d2/disk-0.vmdk is in state: ready. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2560.484722] env[62684]: ERROR oslo_vmware.rw_handles [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5241dfab-7558-3362-3c50-1fd9e1c1c6d2/disk-0.vmdk due to incomplete transfer. [ 2560.484944] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-4e8c233f-1787-47f7-8c83-1c51b6f5dd2e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2560.491487] env[62684]: DEBUG oslo_vmware.rw_handles [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5241dfab-7558-3362-3c50-1fd9e1c1c6d2/disk-0.vmdk. {{(pid=62684) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2560.491682] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Uploaded image 2225be2a-401c-48c9-90d9-576e4579acf9 to the Glance image server {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2560.494242] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Destroying the VM {{(pid=62684) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2560.494458] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7703a52f-6d8c-4261-bef4-18da8b530a3a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2560.500807] env[62684]: DEBUG oslo_vmware.api [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2560.500807] env[62684]: value = "task-2053952" [ 2560.500807] env[62684]: _type = "Task" [ 2560.500807] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2560.508574] env[62684]: DEBUG oslo_vmware.api [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053952, 'name': Destroy_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2561.011268] env[62684]: DEBUG oslo_vmware.api [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053952, 'name': Destroy_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2561.511947] env[62684]: DEBUG oslo_vmware.api [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053952, 'name': Destroy_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2562.012723] env[62684]: DEBUG oslo_vmware.api [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053952, 'name': Destroy_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2562.300740] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2562.512929] env[62684]: DEBUG oslo_vmware.api [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053952, 'name': Destroy_Task} progress is 33%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2562.803814] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2562.804174] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2562.804258] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2562.804405] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2562.805316] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c9659df-3228-4f9d-8e0a-b3cb64a03596 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2562.813634] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0bd4888-d81d-406d-bc60-1181987064ee {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2562.827039] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117b94ca-f9ac-4375-a336-a513de625a3c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2562.832980] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bcd0247-b9c2-48e9-aaa9-93aa2748ffb1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2563.524102] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181146MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2563.524513] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2563.524513] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2563.535346] env[62684]: DEBUG oslo_vmware.api [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053952, 'name': Destroy_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2564.034365] env[62684]: DEBUG oslo_vmware.api [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053952, 'name': Destroy_Task, 'duration_secs': 3.087936} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2564.034832] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Destroyed the VM [ 2564.035077] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Deleting Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2564.035321] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-70dfa1f6-4edf-4217-a5dc-79a335435926 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2564.042053] env[62684]: DEBUG oslo_vmware.api [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2564.042053] env[62684]: value = "task-2053953" [ 2564.042053] env[62684]: _type = "Task" [ 2564.042053] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2564.050272] env[62684]: DEBUG oslo_vmware.api [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053953, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2564.551853] env[62684]: DEBUG oslo_vmware.api [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053953, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2564.556614] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2564.556801] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2564.556940] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2564.583243] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a47ca6e-a0c3-4894-9fbf-a7dc53150519 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2564.590635] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04a7c3a4-1770-4396-bfe8-8e55c0f80223 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2564.621863] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37a8bc85-e38e-42f0-814c-5b5670218887 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2564.632668] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c0da1f-d360-48cb-9cfa-f10ca7711b8e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2564.652152] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2565.052259] env[62684]: DEBUG oslo_vmware.api [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053953, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2565.155577] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2565.553309] env[62684]: DEBUG oslo_vmware.api [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053953, 'name': RemoveSnapshot_Task, 'duration_secs': 1.016019} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2565.553673] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Deleted Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2565.553796] env[62684]: INFO nova.compute.manager [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Took 16.60 seconds to snapshot the instance on the hypervisor. [ 2565.660720] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2565.660888] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.136s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2566.108695] env[62684]: DEBUG nova.compute.manager [None req-9e9967f3-77d1-4ecb-9dc2-cb96f29681ef tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Found 1 images (rotation: 2) {{(pid=62684) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 2567.176265] env[62684]: DEBUG nova.compute.manager [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2567.177232] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b7b5f81-0a7f-4465-abc1-1ea77536cbd2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2567.661632] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2567.661842] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2567.661911] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the list of instances to heal {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2567.687720] env[62684]: INFO nova.compute.manager [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] instance snapshotting [ 2567.688334] env[62684]: DEBUG nova.objects.instance [None 
req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lazy-loading 'flavor' on Instance uuid 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2568.165813] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "refresh_cache-0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2568.165971] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "refresh_cache-0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2568.166119] env[62684]: DEBUG nova.network.neutron [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Forcefully refreshing network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2568.166293] env[62684]: DEBUG nova.objects.instance [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lazy-loading 'info_cache' on Instance uuid 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2568.192923] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f69420-5414-429f-8dbd-e26695db4476 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2568.212752] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ee309d-543f-4ccd-8427-c0dbb7e16aa8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2568.723290] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Creating Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2568.723604] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9d985025-a397-48de-ba7e-8f4d1856b0a1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2568.731708] env[62684]: DEBUG oslo_vmware.api [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2568.731708] env[62684]: value = "task-2053954" [ 2568.731708] env[62684]: _type = "Task" [ 2568.731708] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2568.739141] env[62684]: DEBUG oslo_vmware.api [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053954, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2569.242342] env[62684]: DEBUG oslo_vmware.api [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053954, 'name': CreateSnapshot_Task, 'duration_secs': 0.401984} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2569.242706] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Created Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2569.243438] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e73dd29a-a347-4a72-b5ae-7ffdc316f5d2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2569.760848] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Creating linked-clone VM from snapshot {{(pid=62684) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2569.763467] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-68a49a83-5030-4598-9293-85c4f6196bd2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2569.771629] env[62684]: DEBUG oslo_vmware.api [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2569.771629] env[62684]: value = "task-2053955" [ 2569.771629] env[62684]: _type = "Task" [ 2569.771629] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2569.779341] env[62684]: DEBUG oslo_vmware.api [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053955, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2569.909424] env[62684]: DEBUG nova.network.neutron [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Updating instance_info_cache with network_info: [{"id": "a81e2573-af12-4cc6-93fb-463eedb4ed4b", "address": "fa:16:3e:6b:cf:c0", "network": {"id": "93f58a85-0f77-4adc-88c5-bee1aa383535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1446072689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21534190adb0460e9a74363ae059a59d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa81e2573-af", "ovs_interfaceid": "a81e2573-af12-4cc6-93fb-463eedb4ed4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2570.282393] env[62684]: DEBUG oslo_vmware.api [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053955, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2570.412425] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "refresh_cache-0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2570.412596] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Updated the network info_cache for instance {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2570.412795] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2570.412953] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2570.413120] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2570.413329] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2570.413470] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2570.413597] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2570.781910] env[62684]: DEBUG oslo_vmware.api [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053955, 'name': CloneVM_Task} progress is 95%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2571.048396] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2571.282984] env[62684]: DEBUG oslo_vmware.api [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053955, 'name': CloneVM_Task, 'duration_secs': 1.305654} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2571.283422] env[62684]: INFO nova.virt.vmwareapi.vmops [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Created linked-clone VM from snapshot [ 2571.284055] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7524386d-5a09-4e6b-bb73-a1174ffbae6d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2571.292080] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Uploading image 4f4d43d3-c02c-462b-b42c-b6bf3c6bf789 {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2571.311355] env[62684]: DEBUG oslo_vmware.rw_handles [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2571.311355] env[62684]: value = "vm-421438" [ 2571.311355] env[62684]: _type = "VirtualMachine" [ 2571.311355] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2571.311633] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-4b470bf7-2a1b-4c23-8049-f2ea40e51f5d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2571.317796] env[62684]: DEBUG oslo_vmware.rw_handles [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lease: (returnval){ [ 2571.317796] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52de54b6-a091-be5d-6602-428d47eaa05e" [ 2571.317796] env[62684]: _type = "HttpNfcLease" [ 2571.317796] env[62684]: } obtained for exporting VM: (result){ [ 2571.317796] env[62684]: value = "vm-421438" [ 2571.317796] env[62684]: _type = "VirtualMachine" [ 2571.317796] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2571.318082] env[62684]: DEBUG oslo_vmware.api [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the lease: (returnval){ [ 2571.318082] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52de54b6-a091-be5d-6602-428d47eaa05e" [ 2571.318082] env[62684]: _type = "HttpNfcLease" [ 2571.318082] env[62684]: } to be ready. {{(pid=62684) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2571.324226] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2571.324226] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52de54b6-a091-be5d-6602-428d47eaa05e" [ 2571.324226] env[62684]: _type = "HttpNfcLease" [ 2571.324226] env[62684]: } is initializing. 
{{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2571.826368] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2571.826368] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52de54b6-a091-be5d-6602-428d47eaa05e" [ 2571.826368] env[62684]: _type = "HttpNfcLease" [ 2571.826368] env[62684]: } is ready. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2571.826716] env[62684]: DEBUG oslo_vmware.rw_handles [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2571.826716] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52de54b6-a091-be5d-6602-428d47eaa05e" [ 2571.826716] env[62684]: _type = "HttpNfcLease" [ 2571.826716] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2571.827342] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5875a48d-03d6-4e1f-ae19-4a830592021f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2571.833846] env[62684]: DEBUG oslo_vmware.rw_handles [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ef7250-b853-3bc9-7f23-8abc998deb4a/disk-0.vmdk from lease info. {{(pid=62684) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2571.834031] env[62684]: DEBUG oslo_vmware.rw_handles [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ef7250-b853-3bc9-7f23-8abc998deb4a/disk-0.vmdk for reading. {{(pid=62684) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2571.919824] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1189eb3e-29ca-4513-8442-afe7f53e74bf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2572.301414] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2579.269520] env[62684]: DEBUG oslo_vmware.rw_handles [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ef7250-b853-3bc9-7f23-8abc998deb4a/disk-0.vmdk. 
{{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2579.270465] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c7db59-10bf-4f62-8642-b5d2ff0d7832 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2579.276697] env[62684]: DEBUG oslo_vmware.rw_handles [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ef7250-b853-3bc9-7f23-8abc998deb4a/disk-0.vmdk is in state: ready. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2579.276865] env[62684]: ERROR oslo_vmware.rw_handles [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ef7250-b853-3bc9-7f23-8abc998deb4a/disk-0.vmdk due to incomplete transfer. [ 2579.277088] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-b7911799-9ce9-4373-b210-387318c17109 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2579.284283] env[62684]: DEBUG oslo_vmware.rw_handles [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ef7250-b853-3bc9-7f23-8abc998deb4a/disk-0.vmdk. {{(pid=62684) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2579.284499] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Uploaded image 4f4d43d3-c02c-462b-b42c-b6bf3c6bf789 to the Glance image server {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2579.286572] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Destroying the VM {{(pid=62684) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2579.286794] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1bdcdc51-5fc4-4adf-8be9-24c052a44ca3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2579.292414] env[62684]: DEBUG oslo_vmware.api [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2579.292414] env[62684]: value = "task-2053957" [ 2579.292414] env[62684]: _type = "Task" [ 2579.292414] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2579.299537] env[62684]: DEBUG oslo_vmware.api [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053957, 'name': Destroy_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2579.802153] env[62684]: DEBUG oslo_vmware.api [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053957, 'name': Destroy_Task, 'duration_secs': 0.392831} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2579.802379] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Destroyed the VM [ 2579.802614] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Deleting Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2579.802866] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-49dcd544-9337-4309-8fb0-52393f44d06e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2579.809417] env[62684]: DEBUG oslo_vmware.api [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2579.809417] env[62684]: value = "task-2053958" [ 2579.809417] env[62684]: _type = "Task" [ 2579.809417] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2579.816418] env[62684]: DEBUG oslo_vmware.api [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053958, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2580.319504] env[62684]: DEBUG oslo_vmware.api [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053958, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2580.820082] env[62684]: DEBUG oslo_vmware.api [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053958, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2581.321788] env[62684]: DEBUG oslo_vmware.api [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053958, 'name': RemoveSnapshot_Task, 'duration_secs': 1.352304} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2581.322160] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Deleted Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2581.322308] env[62684]: INFO nova.compute.manager [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Took 13.13 seconds to snapshot the instance on the hypervisor. [ 2581.857982] env[62684]: DEBUG nova.compute.manager [None req-75fde343-1a35-4f04-a21d-4756bc370e5d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Found 2 images (rotation: 2) {{(pid=62684) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 2583.073311] env[62684]: DEBUG nova.compute.manager [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2583.074322] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3766d9b9-f507-4c89-9bbb-00d654082509 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2583.585013] env[62684]: INFO nova.compute.manager [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] instance snapshotting [ 2583.585634] env[62684]: DEBUG nova.objects.instance [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lazy-loading 'flavor' on Instance uuid 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2584.091356] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e62417d9-e328-4461-88df-d31eb9e6cc2a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2584.110127] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c58c54b7-b149-43e2-8ee9-bc9c0ea1dd3f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2584.619604] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 
tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Creating Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2584.619934] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1cebf1ad-08e1-4492-b283-7f9c7a57e03b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2584.627651] env[62684]: DEBUG oslo_vmware.api [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2584.627651] env[62684]: value = "task-2053959" [ 2584.627651] env[62684]: _type = "Task" [ 2584.627651] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2584.634994] env[62684]: DEBUG oslo_vmware.api [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053959, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2585.137090] env[62684]: DEBUG oslo_vmware.api [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053959, 'name': CreateSnapshot_Task, 'duration_secs': 0.406019} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2585.137482] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Created Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2585.138055] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09daed9c-67f0-4475-8202-cea8a8fe57c7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2585.654873] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Creating linked-clone VM from snapshot {{(pid=62684) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2585.655199] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-cfbe7255-a635-40da-82ff-6e66b740d3ff {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2585.663607] env[62684]: DEBUG oslo_vmware.api [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2585.663607] env[62684]: value = "task-2053960" [ 2585.663607] env[62684]: _type = "Task" [ 2585.663607] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2585.671065] env[62684]: DEBUG oslo_vmware.api [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053960, 'name': CloneVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2586.173933] env[62684]: DEBUG oslo_vmware.api [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053960, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2586.675494] env[62684]: DEBUG oslo_vmware.api [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053960, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2587.176445] env[62684]: DEBUG oslo_vmware.api [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053960, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2587.676570] env[62684]: DEBUG oslo_vmware.api [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053960, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2588.179405] env[62684]: DEBUG oslo_vmware.api [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053960, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2588.678540] env[62684]: DEBUG oslo_vmware.api [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053960, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2589.178859] env[62684]: DEBUG oslo_vmware.api [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053960, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2589.680326] env[62684]: DEBUG oslo_vmware.api [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053960, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2590.183284] env[62684]: DEBUG oslo_vmware.api [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053960, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2590.682233] env[62684]: DEBUG oslo_vmware.api [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053960, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2591.184218] env[62684]: DEBUG oslo_vmware.api [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053960, 'name': CloneVM_Task} progress is 94%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2591.684799] env[62684]: DEBUG oslo_vmware.api [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053960, 'name': CloneVM_Task} progress is 95%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2592.186608] env[62684]: DEBUG oslo_vmware.api [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053960, 'name': CloneVM_Task, 'duration_secs': 6.279784} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2592.186872] env[62684]: INFO nova.virt.vmwareapi.vmops [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Created linked-clone VM from snapshot [ 2592.187598] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c6ff822-2198-42d5-b87f-b690faf7a497 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2592.194395] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Uploading image e2f53733-f949-49ed-9879-236e9bbc7ba2 {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2592.218543] env[62684]: DEBUG oslo_vmware.rw_handles [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2592.218543] env[62684]: value = "vm-421440" [ 2592.218543] env[62684]: _type = "VirtualMachine" [ 2592.218543] env[62684]: }. 
{{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2592.218799] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-29cf1a4e-be54-4f33-966b-168565fb18cd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2592.225637] env[62684]: DEBUG oslo_vmware.rw_handles [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lease: (returnval){ [ 2592.225637] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52099806-39a0-d889-43fd-4c6876904e73" [ 2592.225637] env[62684]: _type = "HttpNfcLease" [ 2592.225637] env[62684]: } obtained for exporting VM: (result){ [ 2592.225637] env[62684]: value = "vm-421440" [ 2592.225637] env[62684]: _type = "VirtualMachine" [ 2592.225637] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2592.225925] env[62684]: DEBUG oslo_vmware.api [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the lease: (returnval){ [ 2592.225925] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52099806-39a0-d889-43fd-4c6876904e73" [ 2592.225925] env[62684]: _type = "HttpNfcLease" [ 2592.225925] env[62684]: } to be ready. {{(pid=62684) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2592.231624] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2592.231624] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52099806-39a0-d889-43fd-4c6876904e73" [ 2592.231624] env[62684]: _type = "HttpNfcLease" [ 2592.231624] env[62684]: } is initializing. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2592.735329] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2592.735329] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52099806-39a0-d889-43fd-4c6876904e73" [ 2592.735329] env[62684]: _type = "HttpNfcLease" [ 2592.735329] env[62684]: } is ready. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2592.735926] env[62684]: DEBUG oslo_vmware.rw_handles [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2592.735926] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52099806-39a0-d889-43fd-4c6876904e73" [ 2592.735926] env[62684]: _type = "HttpNfcLease" [ 2592.735926] env[62684]: }. 
{{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2592.736345] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb7153a-97c3-480f-ac91-5ad8b31a66c5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2592.743547] env[62684]: DEBUG oslo_vmware.rw_handles [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5273e14f-7c27-03d0-364d-1d0786933e83/disk-0.vmdk from lease info. {{(pid=62684) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2592.743788] env[62684]: DEBUG oslo_vmware.rw_handles [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5273e14f-7c27-03d0-364d-1d0786933e83/disk-0.vmdk for reading. {{(pid=62684) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2592.833198] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3e3e506f-341c-487a-956e-7447e67f765f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2600.142113] env[62684]: DEBUG oslo_vmware.rw_handles [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5273e14f-7c27-03d0-364d-1d0786933e83/disk-0.vmdk. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2600.143057] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e0df8ef-1a77-40ee-9c56-1674b1b60303 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2600.149171] env[62684]: DEBUG oslo_vmware.rw_handles [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5273e14f-7c27-03d0-364d-1d0786933e83/disk-0.vmdk is in state: ready. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2600.149349] env[62684]: ERROR oslo_vmware.rw_handles [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5273e14f-7c27-03d0-364d-1d0786933e83/disk-0.vmdk due to incomplete transfer. 
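The entries above trace the driver's stream-optimized backup path end to end: a CreateSnapshot_Task on the instance, a linked-clone CloneVM_Task from that snapshot, an HttpNfcLease opened to read the clone's disk-0.vmdk, periodic HttpNfcLeaseProgress updates while the bytes stream to the Glance image server, and an HttpNfcLeaseAbort when the read handle is released while the lease still reports "ready" (logged as an ERROR, "due to incomplete transfer"), after which the clone VM is destroyed and the snapshot removed. Below is a minimal, hypothetical sketch of that wait/copy/abort pattern, assuming generic callbacks rather than the vSphere SDK; it is not Nova's or oslo.vmware's code, and every name in it is illustrative only.

# Minimal, hypothetical sketch of the lease-based export pattern visible in the
# surrounding entries (CreateSnapshot_Task -> CloneVM_Task -> HttpNfcLease ->
# read disk-0.vmdk -> HttpNfcLeaseProgress -> HttpNfcLeaseAbort on incomplete
# transfer). Not Nova's or oslo.vmware's implementation; all names and
# callbacks below are illustrative only.
import time


class LeaseError(Exception):
    """Raised when an export lease cannot be brought to completion."""


def wait_for_lease_ready(poll_state, interval=0.5, timeout=60.0):
    """Poll a lease-state callback until it returns 'ready'."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state = poll_state()  # expected values: 'initializing', 'ready', 'error'
        if state == 'ready':
            return
        if state == 'error':
            raise LeaseError("lease entered error state")
        time.sleep(interval)
    raise LeaseError("timed out waiting for lease to become ready")


def export_disk(poll_state, read_chunk, write_chunk, report_progress,
                abort_lease, complete_lease, total_bytes, chunk_size=1 << 20):
    """Copy an exported disk, reporting percent progress on the lease.

    If the copy stops short of total_bytes, the lease is aborted -- the same
    outcome the log records as "Aborting lease ... due to incomplete transfer."
    """
    wait_for_lease_ready(poll_state)
    copied = 0
    try:
        while copied < total_bytes:
            data = read_chunk(chunk_size)
            if not data:
                break  # source closed early: incomplete transfer
            write_chunk(data)
            copied += len(data)
            report_progress(min(100, copied * 100 // total_bytes))
    except Exception:
        abort_lease()
        raise
    if copied < total_bytes:
        abort_lease()
        raise LeaseError("incomplete transfer: %d of %d bytes"
                         % (copied, total_bytes))
    complete_lease()

Aborting rather than completing an unfinished lease is what releases the export on the host side, and the log shows exactly that sequence: the ERROR about the incomplete transfer, the HttpNfcLeaseAbort call, the closed VMDK read handle, then Destroy_Task on the clone and RemoveSnapshot_Task on the source VM before the backup rotation runs.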
[ 2600.149555] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c8a6c1da-0423-4407-a244-a983135ac5cb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2600.157525] env[62684]: DEBUG oslo_vmware.rw_handles [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5273e14f-7c27-03d0-364d-1d0786933e83/disk-0.vmdk. {{(pid=62684) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2600.157714] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Uploaded image e2f53733-f949-49ed-9879-236e9bbc7ba2 to the Glance image server {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2600.159899] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Destroying the VM {{(pid=62684) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2600.160152] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-de9eacd0-b5e8-4ed2-98a9-f4bffd189db6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2600.165728] env[62684]: DEBUG oslo_vmware.api [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2600.165728] env[62684]: value = "task-2053962" [ 2600.165728] env[62684]: _type = "Task" [ 2600.165728] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2600.172889] env[62684]: DEBUG oslo_vmware.api [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053962, 'name': Destroy_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2600.675060] env[62684]: DEBUG oslo_vmware.api [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053962, 'name': Destroy_Task, 'duration_secs': 0.380331} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2600.675340] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Destroyed the VM [ 2600.675577] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Deleting Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2600.675844] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-65728e1b-3926-420f-a982-99c44ac93ea5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2600.681602] env[62684]: DEBUG oslo_vmware.api [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2600.681602] env[62684]: value = "task-2053963" [ 2600.681602] env[62684]: _type = "Task" [ 2600.681602] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2600.688446] env[62684]: DEBUG oslo_vmware.api [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053963, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2601.192492] env[62684]: DEBUG oslo_vmware.api [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053963, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2601.692610] env[62684]: DEBUG oslo_vmware.api [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053963, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2602.194229] env[62684]: DEBUG oslo_vmware.api [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053963, 'name': RemoveSnapshot_Task, 'duration_secs': 1.303813} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2602.194641] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Deleted Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2602.194786] env[62684]: INFO nova.compute.manager [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Took 18.10 seconds to snapshot the instance on the hypervisor. [ 2602.735837] env[62684]: DEBUG nova.compute.manager [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Found 3 images (rotation: 2) {{(pid=62684) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 2602.736072] env[62684]: DEBUG nova.compute.manager [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Rotating out 1 backups {{(pid=62684) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4563}} [ 2602.736243] env[62684]: DEBUG nova.compute.manager [None req-845de44b-7b58-4816-8cd5-9a24445b322d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Deleting image 2225be2a-401c-48c9-90d9-576e4579acf9 {{(pid=62684) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4568}} [ 2604.050352] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a499c4b0-1868-4b95-a241-c0c8c77e9607 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2604.050740] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a499c4b0-1868-4b95-a241-c0c8c77e9607 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2604.051151] env[62684]: DEBUG nova.compute.manager [None req-a499c4b0-1868-4b95-a241-c0c8c77e9607 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2604.051802] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af14441b-bd98-4f4f-bbdf-17d152aa3237 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2604.059009] env[62684]: DEBUG nova.compute.manager [None req-a499c4b0-1868-4b95-a241-c0c8c77e9607 tempest-ServerActionsTestOtherB-1953174230 
tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62684) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 2604.059562] env[62684]: DEBUG nova.objects.instance [None req-a499c4b0-1868-4b95-a241-c0c8c77e9607 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lazy-loading 'flavor' on Instance uuid 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2604.565052] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a499c4b0-1868-4b95-a241-c0c8c77e9607 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2604.565245] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9a9ff28a-987b-4292-a7a5-0fd65c5c71a1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2604.572442] env[62684]: DEBUG oslo_vmware.api [None req-a499c4b0-1868-4b95-a241-c0c8c77e9607 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2604.572442] env[62684]: value = "task-2053964" [ 2604.572442] env[62684]: _type = "Task" [ 2604.572442] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2604.580443] env[62684]: DEBUG oslo_vmware.api [None req-a499c4b0-1868-4b95-a241-c0c8c77e9607 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053964, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2605.081343] env[62684]: DEBUG oslo_vmware.api [None req-a499c4b0-1868-4b95-a241-c0c8c77e9607 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053964, 'name': PowerOffVM_Task, 'duration_secs': 0.167889} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2605.081724] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-a499c4b0-1868-4b95-a241-c0c8c77e9607 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2605.081813] env[62684]: DEBUG nova.compute.manager [None req-a499c4b0-1868-4b95-a241-c0c8c77e9607 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2605.082543] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d14080-d59b-48e8-914a-0d987e2a1c6f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2605.593977] env[62684]: DEBUG oslo_concurrency.lockutils [None req-a499c4b0-1868-4b95-a241-c0c8c77e9607 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.543s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2607.240566] env[62684]: DEBUG nova.compute.manager [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Stashing vm_state: stopped {{(pid=62684) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 2607.757175] env[62684]: DEBUG oslo_concurrency.lockutils [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2607.757456] env[62684]: DEBUG oslo_concurrency.lockutils [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2608.262539] env[62684]: INFO nova.compute.claims [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2608.770120] env[62684]: INFO nova.compute.resource_tracker [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Updating resource usage from migration c6134942-0797-40dd-9ad3-0c97007ad2f1 [ 2608.810081] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-cbf80122-e25f-45b5-a477-003c135abb64 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2608.818284] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-887d6428-5bdd-4128-93e5-0eec82a3b79b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2608.847603] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f57135-489a-42d2-b41c-01822f13a1d3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2608.855201] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-425a2085-7049-4a87-9246-a6a66a257b68 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2608.869556] env[62684]: DEBUG nova.compute.provider_tree [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2609.372217] env[62684]: DEBUG nova.scheduler.client.report [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2609.877618] env[62684]: DEBUG oslo_concurrency.lockutils [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.120s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2609.877834] env[62684]: INFO nova.compute.manager [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Migrating [ 2610.392309] env[62684]: DEBUG oslo_concurrency.lockutils [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "refresh_cache-0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2610.392730] env[62684]: DEBUG oslo_concurrency.lockutils [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquired lock "refresh_cache-0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" {{(pid=62684) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2610.392730] env[62684]: DEBUG nova.network.neutron [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2611.136227] env[62684]: DEBUG nova.network.neutron [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Updating instance_info_cache with network_info: [{"id": "a81e2573-af12-4cc6-93fb-463eedb4ed4b", "address": "fa:16:3e:6b:cf:c0", "network": {"id": "93f58a85-0f77-4adc-88c5-bee1aa383535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1446072689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21534190adb0460e9a74363ae059a59d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa81e2573-af", "ovs_interfaceid": "a81e2573-af12-4cc6-93fb-463eedb4ed4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2611.639193] env[62684]: DEBUG oslo_concurrency.lockutils [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Releasing lock "refresh_cache-0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2613.154729] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b8f7115-3096-4274-a156-ac27813b3579 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2613.173719] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Updating instance '0c7a3cd8-b42a-4111-bcfd-8fcd15b51028' progress to 0 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2613.680130] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Powering off the VM {{(pid=62684) power_off_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2613.680437] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a4c7438c-8190-4830-9304-f6d6154cf573 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2613.689160] env[62684]: DEBUG oslo_vmware.api [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2613.689160] env[62684]: value = "task-2053965" [ 2613.689160] env[62684]: _type = "Task" [ 2613.689160] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2613.698709] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] VM already powered off {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2613.698911] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Updating instance '0c7a3cd8-b42a-4111-bcfd-8fcd15b51028' progress to 17 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2614.205078] env[62684]: DEBUG nova.virt.hardware [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2614.205498] env[62684]: DEBUG nova.virt.hardware [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2614.205542] env[62684]: DEBUG nova.virt.hardware [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2614.205714] env[62684]: DEBUG nova.virt.hardware [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2614.205867] env[62684]: DEBUG nova.virt.hardware [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c 
tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2614.206032] env[62684]: DEBUG nova.virt.hardware [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2614.206242] env[62684]: DEBUG nova.virt.hardware [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2614.206406] env[62684]: DEBUG nova.virt.hardware [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2614.206581] env[62684]: DEBUG nova.virt.hardware [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2614.206750] env[62684]: DEBUG nova.virt.hardware [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2614.206930] env[62684]: DEBUG nova.virt.hardware [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2614.211920] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1af505e1-18bd-42fe-92a9-22bb84eeac91 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2614.227610] env[62684]: DEBUG oslo_vmware.api [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2614.227610] env[62684]: value = "task-2053966" [ 2614.227610] env[62684]: _type = "Task" [ 2614.227610] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2614.235454] env[62684]: DEBUG oslo_vmware.api [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053966, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2614.738026] env[62684]: DEBUG oslo_vmware.api [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053966, 'name': ReconfigVM_Task, 'duration_secs': 0.168592} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2614.738378] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Updating instance '0c7a3cd8-b42a-4111-bcfd-8fcd15b51028' progress to 33 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2615.245078] env[62684]: DEBUG nova.virt.hardware [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2615.245525] env[62684]: DEBUG nova.virt.hardware [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2615.245525] env[62684]: DEBUG nova.virt.hardware [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2615.245700] env[62684]: DEBUG nova.virt.hardware [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2615.245841] env[62684]: DEBUG nova.virt.hardware [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2615.246022] env[62684]: DEBUG nova.virt.hardware [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2615.246259] env[62684]: DEBUG nova.virt.hardware [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c 
tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2615.246431] env[62684]: DEBUG nova.virt.hardware [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2615.246604] env[62684]: DEBUG nova.virt.hardware [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2615.246775] env[62684]: DEBUG nova.virt.hardware [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2615.246970] env[62684]: DEBUG nova.virt.hardware [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2615.252518] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Reconfiguring VM instance instance-0000007a to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2615.252820] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-12e94cda-92b9-4126-8bd4-d53d589f19ed {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2615.271031] env[62684]: DEBUG oslo_vmware.api [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2615.271031] env[62684]: value = "task-2053967" [ 2615.271031] env[62684]: _type = "Task" [ 2615.271031] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2615.278493] env[62684]: DEBUG oslo_vmware.api [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053967, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2615.781483] env[62684]: DEBUG oslo_vmware.api [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053967, 'name': ReconfigVM_Task, 'duration_secs': 0.195392} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2615.781792] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Reconfigured VM instance instance-0000007a to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2615.782521] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a69b4801-3d25-4e2d-91c3-b2f7508f3cc4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2615.803345] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028/0c7a3cd8-b42a-4111-bcfd-8fcd15b51028.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2615.804087] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67045299-7e05-4010-8817-44c09a2c66e7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2615.820358] env[62684]: DEBUG oslo_vmware.api [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2615.820358] env[62684]: value = "task-2053968" [ 2615.820358] env[62684]: _type = "Task" [ 2615.820358] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2615.827584] env[62684]: DEBUG oslo_vmware.api [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053968, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2616.330227] env[62684]: DEBUG oslo_vmware.api [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053968, 'name': ReconfigVM_Task, 'duration_secs': 0.238075} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2616.330512] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Reconfigured VM instance instance-0000007a to attach disk [datastore1] 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028/0c7a3cd8-b42a-4111-bcfd-8fcd15b51028.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2616.330762] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Updating instance '0c7a3cd8-b42a-4111-bcfd-8fcd15b51028' progress to 50 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2616.839360] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0c4f793-d417-4e5e-90e5-aba2543ee9cd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2616.857858] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46dcb673-f6c8-4d03-8e39-66f900f21983 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2616.875176] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Updating instance '0c7a3cd8-b42a-4111-bcfd-8fcd15b51028' progress to 67 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2617.413135] env[62684]: DEBUG nova.network.neutron [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Port a81e2573-af12-4cc6-93fb-463eedb4ed4b binding to destination host cpu-1 is already ACTIVE {{(pid=62684) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2618.434368] env[62684]: DEBUG oslo_concurrency.lockutils [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "0c7a3cd8-b42a-4111-bcfd-8fcd15b51028-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2618.434718] env[62684]: DEBUG oslo_concurrency.lockutils [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "0c7a3cd8-b42a-4111-bcfd-8fcd15b51028-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2618.434772] env[62684]: DEBUG oslo_concurrency.lockutils [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 
tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "0c7a3cd8-b42a-4111-bcfd-8fcd15b51028-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2619.470592] env[62684]: DEBUG oslo_concurrency.lockutils [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "refresh_cache-0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2619.470874] env[62684]: DEBUG oslo_concurrency.lockutils [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquired lock "refresh_cache-0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2619.470984] env[62684]: DEBUG nova.network.neutron [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2620.194118] env[62684]: DEBUG nova.network.neutron [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Updating instance_info_cache with network_info: [{"id": "a81e2573-af12-4cc6-93fb-463eedb4ed4b", "address": "fa:16:3e:6b:cf:c0", "network": {"id": "93f58a85-0f77-4adc-88c5-bee1aa383535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1446072689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21534190adb0460e9a74363ae059a59d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa81e2573-af", "ovs_interfaceid": "a81e2573-af12-4cc6-93fb-463eedb4ed4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2620.697670] env[62684]: DEBUG oslo_concurrency.lockutils [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Releasing lock "refresh_cache-0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2621.222262] env[62684]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3072197-1823-4eca-ac67-73c29a0653a7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2621.241142] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9db1583-bbcc-4649-b806-49c5ab44d8c1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2621.247762] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Updating instance '0c7a3cd8-b42a-4111-bcfd-8fcd15b51028' progress to 83 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2621.753880] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-95a2d5aa-4d3e-4e16-a8ed-844658a20d1c tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Updating instance '0c7a3cd8-b42a-4111-bcfd-8fcd15b51028' progress to 100 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2623.299959] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2623.802312] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2623.802529] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2623.802692] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2623.802934] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2623.803737] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73a6faaf-4b03-4f00-9376-5174b9285e48 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2623.811783] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8642af06-93f3-460c-a45f-3a6b3098f6e3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
2623.826911] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8bce494-e341-4850-b009-330d2dd4ce4c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2623.832943] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd3a7a91-6b4d-4bda-b2b3-0b1519b56d91 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2623.861087] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181048MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2623.861243] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2623.861430] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2624.260436] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53962b7e-55c5-4b76-afe6-6f7f0b896912 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2624.260712] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53962b7e-55c5-4b76-afe6-6f7f0b896912 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2624.260953] env[62684]: DEBUG nova.compute.manager [None req-53962b7e-55c5-4b76-afe6-6f7f0b896912 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Going to confirm migration 9 {{(pid=62684) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 2624.821230] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53962b7e-55c5-4b76-afe6-6f7f0b896912 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "refresh_cache-0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2624.821543] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53962b7e-55c5-4b76-afe6-6f7f0b896912 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquired lock "refresh_cache-0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" {{(pid=62684) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2624.821632] env[62684]: DEBUG nova.network.neutron [None req-53962b7e-55c5-4b76-afe6-6f7f0b896912 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2624.821894] env[62684]: DEBUG nova.objects.instance [None req-53962b7e-55c5-4b76-afe6-6f7f0b896912 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lazy-loading 'info_cache' on Instance uuid 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2624.868150] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Applying migration context for instance 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028 as it has an incoming, in-progress migration c6134942-0797-40dd-9ad3-0c97007ad2f1. Migration status is confirming {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 2624.868689] env[62684]: INFO nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Updating resource usage from migration c6134942-0797-40dd-9ad3-0c97007ad2f1 [ 2624.884952] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Migration c6134942-0797-40dd-9ad3-0c97007ad2f1 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 2624.884952] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2624.884952] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2624.884952] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=960MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2624.920560] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74fe7240-d269-4317-a729-dd9ff3bd943d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2624.927795] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-233f945c-35a6-4829-8d18-7fe764ce0d09 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2624.958669] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5da45b0-341f-41a1-8fe6-e13c330ebc3f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2624.966482] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b93aa898-dce8-4fe7-8675-7c3c392d0330 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2624.979441] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2625.482227] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2625.986865] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2625.987139] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.126s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2626.020931] env[62684]: DEBUG nova.network.neutron [None 
req-53962b7e-55c5-4b76-afe6-6f7f0b896912 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Updating instance_info_cache with network_info: [{"id": "a81e2573-af12-4cc6-93fb-463eedb4ed4b", "address": "fa:16:3e:6b:cf:c0", "network": {"id": "93f58a85-0f77-4adc-88c5-bee1aa383535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1446072689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21534190adb0460e9a74363ae059a59d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa81e2573-af", "ovs_interfaceid": "a81e2573-af12-4cc6-93fb-463eedb4ed4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2626.524384] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53962b7e-55c5-4b76-afe6-6f7f0b896912 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Releasing lock "refresh_cache-0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2626.524647] env[62684]: DEBUG nova.objects.instance [None req-53962b7e-55c5-4b76-afe6-6f7f0b896912 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lazy-loading 'migration_context' on Instance uuid 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2627.028020] env[62684]: DEBUG nova.objects.base [None req-53962b7e-55c5-4b76-afe6-6f7f0b896912 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Object Instance<0c7a3cd8-b42a-4111-bcfd-8fcd15b51028> lazy-loaded attributes: info_cache,migration_context {{(pid=62684) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2627.028987] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40548cf7-4c1b-4708-aabd-a7096858fd6f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2627.049176] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6baaa500-1eea-47d2-92c7-d32495f87805 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2627.054260] env[62684]: DEBUG oslo_vmware.api [None req-53962b7e-55c5-4b76-afe6-6f7f0b896912 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2627.054260] env[62684]: value = 
"session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a11944-3c50-b32b-2636-e8ac46034a32" [ 2627.054260] env[62684]: _type = "Task" [ 2627.054260] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2627.061734] env[62684]: DEBUG oslo_vmware.api [None req-53962b7e-55c5-4b76-afe6-6f7f0b896912 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a11944-3c50-b32b-2636-e8ac46034a32, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2627.564433] env[62684]: DEBUG oslo_vmware.api [None req-53962b7e-55c5-4b76-afe6-6f7f0b896912 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a11944-3c50-b32b-2636-e8ac46034a32, 'name': SearchDatastore_Task, 'duration_secs': 0.010017} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2627.564733] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53962b7e-55c5-4b76-afe6-6f7f0b896912 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2627.564986] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53962b7e-55c5-4b76-afe6-6f7f0b896912 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2627.989961] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2627.991625] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2627.991625] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the list of instances to heal {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2628.101949] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-041ed32f-46c8-4d4d-bef3-ea743f364a29 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2628.109910] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504c38f2-e161-4250-8c25-f6cfdc099466 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2628.139301] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d57a993-d928-4cae-8543-ba61027418f5 {{(pid=62684) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2628.146051] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95427bc6-7b47-4a6f-9afa-a5005a6381e2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2628.158412] env[62684]: DEBUG nova.compute.provider_tree [None req-53962b7e-55c5-4b76-afe6-6f7f0b896912 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2628.520603] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "refresh_cache-0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2628.520751] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "refresh_cache-0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2628.520901] env[62684]: DEBUG nova.network.neutron [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Forcefully refreshing network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2628.521118] env[62684]: DEBUG nova.objects.instance [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lazy-loading 'info_cache' on Instance uuid 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2628.661823] env[62684]: DEBUG nova.scheduler.client.report [None req-53962b7e-55c5-4b76-afe6-6f7f0b896912 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2629.670978] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53962b7e-55c5-4b76-afe6-6f7f0b896912 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.106s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2629.671379] env[62684]: DEBUG nova.compute.manager [None req-53962b7e-55c5-4b76-afe6-6f7f0b896912 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=62684) _confirm_resize /opt/stack/nova/nova/compute/manager.py:4910}} [ 2630.228100] env[62684]: INFO nova.scheduler.client.report [None req-53962b7e-55c5-4b76-afe6-6f7f0b896912 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Deleted allocation for migration c6134942-0797-40dd-9ad3-0c97007ad2f1 [ 2630.232308] env[62684]: DEBUG nova.network.neutron [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Updating instance_info_cache with network_info: [{"id": "a81e2573-af12-4cc6-93fb-463eedb4ed4b", "address": "fa:16:3e:6b:cf:c0", "network": {"id": "93f58a85-0f77-4adc-88c5-bee1aa383535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1446072689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21534190adb0460e9a74363ae059a59d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa81e2573-af", "ovs_interfaceid": "a81e2573-af12-4cc6-93fb-463eedb4ed4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2630.735206] env[62684]: DEBUG oslo_concurrency.lockutils [None req-53962b7e-55c5-4b76-afe6-6f7f0b896912 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.474s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2630.736462] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "refresh_cache-0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2630.736676] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Updated the network info_cache for instance {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2630.737042] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2630.737217] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2630.737369] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2630.737512] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2630.737655] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2630.737781] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2631.044187] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2631.123353] env[62684]: DEBUG nova.objects.instance [None req-c1d99a31-fbf2-4472-bf0b-83532b8e6c80 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lazy-loading 'flavor' on Instance uuid 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2631.629156] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c1d99a31-fbf2-4472-bf0b-83532b8e6c80 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "refresh_cache-0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2631.629408] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c1d99a31-fbf2-4472-bf0b-83532b8e6c80 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquired lock "refresh_cache-0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2631.629517] env[62684]: DEBUG nova.network.neutron [None req-c1d99a31-fbf2-4472-bf0b-83532b8e6c80 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2631.629696] env[62684]: DEBUG nova.objects.instance [None req-c1d99a31-fbf2-4472-bf0b-83532b8e6c80 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lazy-loading 'info_cache' on Instance uuid 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2632.133296] env[62684]: DEBUG nova.objects.base [None req-c1d99a31-fbf2-4472-bf0b-83532b8e6c80 tempest-ServerActionsTestOtherB-1953174230 
tempest-ServerActionsTestOtherB-1953174230-project-member] Object Instance<0c7a3cd8-b42a-4111-bcfd-8fcd15b51028> lazy-loaded attributes: flavor,info_cache {{(pid=62684) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2632.295842] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2632.832762] env[62684]: DEBUG nova.network.neutron [None req-c1d99a31-fbf2-4472-bf0b-83532b8e6c80 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Updating instance_info_cache with network_info: [{"id": "a81e2573-af12-4cc6-93fb-463eedb4ed4b", "address": "fa:16:3e:6b:cf:c0", "network": {"id": "93f58a85-0f77-4adc-88c5-bee1aa383535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1446072689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21534190adb0460e9a74363ae059a59d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa81e2573-af", "ovs_interfaceid": "a81e2573-af12-4cc6-93fb-463eedb4ed4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2633.337861] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c1d99a31-fbf2-4472-bf0b-83532b8e6c80 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Releasing lock "refresh_cache-0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2633.841846] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1d99a31-fbf2-4472-bf0b-83532b8e6c80 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2633.842203] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4bd66723-0475-4975-805c-9557dc954887 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2633.850218] env[62684]: DEBUG oslo_vmware.api [None req-c1d99a31-fbf2-4472-bf0b-83532b8e6c80 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2633.850218] env[62684]: value = "task-2053969" [ 2633.850218] env[62684]: _type = "Task" [ 2633.850218] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2633.857863] env[62684]: DEBUG oslo_vmware.api [None req-c1d99a31-fbf2-4472-bf0b-83532b8e6c80 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053969, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2634.300382] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2634.360676] env[62684]: DEBUG oslo_vmware.api [None req-c1d99a31-fbf2-4472-bf0b-83532b8e6c80 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053969, 'name': PowerOnVM_Task, 'duration_secs': 0.368338} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2634.361073] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1d99a31-fbf2-4472-bf0b-83532b8e6c80 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2634.361147] env[62684]: DEBUG nova.compute.manager [None req-c1d99a31-fbf2-4472-bf0b-83532b8e6c80 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2634.361900] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1497e615-f3d2-4307-abc1-383bfaf0424b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2635.638584] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2635.639034] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2635.639120] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "0c7a3cd8-b42a-4111-bcfd-8fcd15b51028-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2635.639261] env[62684]: 
DEBUG oslo_concurrency.lockutils [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "0c7a3cd8-b42a-4111-bcfd-8fcd15b51028-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2635.639455] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "0c7a3cd8-b42a-4111-bcfd-8fcd15b51028-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2635.641625] env[62684]: INFO nova.compute.manager [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Terminating instance [ 2635.643382] env[62684]: DEBUG nova.compute.manager [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2635.643617] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2635.644497] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3102c99-6aa3-496a-bda0-2bec873dc48c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2635.652371] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2635.652595] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3cbca1fc-b773-41d1-948f-c69a95c4515e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2635.658319] env[62684]: DEBUG oslo_vmware.api [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2635.658319] env[62684]: value = "task-2053970" [ 2635.658319] env[62684]: _type = "Task" [ 2635.658319] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2635.666183] env[62684]: DEBUG oslo_vmware.api [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053970, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2636.168543] env[62684]: DEBUG oslo_vmware.api [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053970, 'name': PowerOffVM_Task, 'duration_secs': 0.173143} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2636.168814] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2636.168991] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2636.169260] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aeb0da04-8c44-4cb0-ac26-ee212b150903 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2642.351454] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2642.351865] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2642.351865] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Deleting the datastore file [datastore1] 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2642.352183] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b7be59aa-cd24-4bc5-a92a-91d93b361f2c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2642.360208] env[62684]: DEBUG oslo_vmware.api [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2642.360208] env[62684]: value = "task-2053972" [ 2642.360208] 
env[62684]: _type = "Task" [ 2642.360208] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2642.370947] env[62684]: DEBUG oslo_vmware.api [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053972, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2642.871453] env[62684]: DEBUG oslo_vmware.api [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053972, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147015} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2642.871724] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2642.871935] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2642.872199] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2642.872394] env[62684]: INFO nova.compute.manager [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Took 7.23 seconds to destroy the instance on the hypervisor. [ 2642.872627] env[62684]: DEBUG oslo.service.loopingcall [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2642.872821] env[62684]: DEBUG nova.compute.manager [-] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2642.872916] env[62684]: DEBUG nova.network.neutron [-] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2643.342832] env[62684]: DEBUG nova.compute.manager [req-0227d501-e9d5-465f-88a7-864f403f5a9e req-faa67cbb-bcc5-48c2-ac71-758e7ff23f99 service nova] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Received event network-vif-deleted-a81e2573-af12-4cc6-93fb-463eedb4ed4b {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2643.343052] env[62684]: INFO nova.compute.manager [req-0227d501-e9d5-465f-88a7-864f403f5a9e req-faa67cbb-bcc5-48c2-ac71-758e7ff23f99 service nova] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Neutron deleted interface a81e2573-af12-4cc6-93fb-463eedb4ed4b; detaching it from the instance and deleting it from the info cache [ 2643.343240] env[62684]: DEBUG nova.network.neutron [req-0227d501-e9d5-465f-88a7-864f403f5a9e req-faa67cbb-bcc5-48c2-ac71-758e7ff23f99 service nova] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2643.783775] env[62684]: DEBUG nova.network.neutron [-] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2643.845394] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c5a49750-789b-4e19-a3cf-a27ac39ae2f7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2643.855727] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddc69570-f942-444d-b6c6-5fe445ad26ec {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2643.880184] env[62684]: DEBUG nova.compute.manager [req-0227d501-e9d5-465f-88a7-864f403f5a9e req-faa67cbb-bcc5-48c2-ac71-758e7ff23f99 service nova] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Detach interface failed, port_id=a81e2573-af12-4cc6-93fb-463eedb4ed4b, reason: Instance 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028 could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2644.287206] env[62684]: INFO nova.compute.manager [-] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Took 1.41 seconds to deallocate network for instance. 
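
The terminate sequence above shows the two patterns that recur throughout this log: a named per-instance lock held around the compute-manager operation (the lockutils "acquired"/"released" pairs for do_terminate_instance), and a vCenter task submitted through oslo.vmware and then polled until it reports completion (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task). The sketch below is only a minimal illustration of that lock-then-poll shape; the helper names, demo values and 0.5 s poll interval are invented here and this is not Nova's actual implementation.

    # Minimal sketch of the lock-then-poll pattern visible in the log above.
    # Helper names, demo values and the poll interval are invented; this is
    # not Nova's code.
    import time

    from oslo_concurrency import lockutils


    def wait_for_task(poll_fn, poll_interval=0.5):
        # Poll a task until it succeeds, the way the log's "progress is 0%" /
        # "completed successfully" pairs suggest.
        while True:
            info = poll_fn()
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                raise RuntimeError(info.get("error", "task failed"))
            time.sleep(poll_interval)


    def terminate_instance(instance_uuid, power_off_fn):
        # Serialize the operation per instance with a named lock; this is what
        # produces the acquired/held/released debug lines above.
        with lockutils.lock(instance_uuid):
            poll = power_off_fn()   # submit e.g. a PowerOffVM_Task, get back a poll callable
            wait_for_task(poll)     # block until the task completes


    if __name__ == "__main__":
        # Fake task that finishes on the second poll, just to exercise the sketch.
        states = iter([{"state": "running"}, {"state": "success"}])
        terminate_instance("0c7a3cd8-b42a-4111-bcfd-8fcd15b51028",
                           lambda: (lambda: next(states)))
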
[ 2644.794507] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2644.794883] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2644.794933] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2644.819973] env[62684]: INFO nova.scheduler.client.report [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Deleted allocations for instance 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028 [ 2645.327456] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8012a4d0-fa10-4918-8d87-508edb1197ba tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "0c7a3cd8-b42a-4111-bcfd-8fcd15b51028" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.688s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2645.985520] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "d338d036-f969-41d4-8986-62b043e5ad2f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2645.985791] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "d338d036-f969-41d4-8986-62b043e5ad2f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2646.487891] env[62684]: DEBUG nova.compute.manager [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2647.005379] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2647.005630] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2647.007441] env[62684]: INFO nova.compute.claims [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2648.042517] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b54d00-5d83-4356-8197-999b29a8055e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2648.050162] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2d948cc-0dcc-4486-a1ab-4833eaffb90c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2648.079152] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-689fe0ff-5b12-4953-9049-84b70f68a502 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2648.085904] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5eccca4-330a-4637-acdb-464dd6506a9f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2648.099554] env[62684]: DEBUG nova.compute.provider_tree [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2648.603237] env[62684]: DEBUG nova.scheduler.client.report [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2649.108626] env[62684]: DEBUG oslo_concurrency.lockutils 
[None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.103s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2649.109209] env[62684]: DEBUG nova.compute.manager [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2649.614462] env[62684]: DEBUG nova.compute.utils [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2649.616247] env[62684]: DEBUG nova.compute.manager [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Allocating IP information in the background. {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2649.616423] env[62684]: DEBUG nova.network.neutron [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2649.666589] env[62684]: DEBUG nova.policy [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '22544927e67845a69c8ac324918f2e93', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21534190adb0460e9a74363ae059a59d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2649.924924] env[62684]: DEBUG nova.network.neutron [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Successfully created port: b2731d5e-8639-4c22-bcd9-a86b875143cd {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2650.119435] env[62684]: DEBUG nova.compute.manager [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Start building block device mappings for instance. 
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2651.130324] env[62684]: DEBUG nova.compute.manager [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Start spawning the instance on the hypervisor. {{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2651.156691] env[62684]: DEBUG nova.virt.hardware [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2651.156942] env[62684]: DEBUG nova.virt.hardware [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2651.157124] env[62684]: DEBUG nova.virt.hardware [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2651.157321] env[62684]: DEBUG nova.virt.hardware [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2651.157473] env[62684]: DEBUG nova.virt.hardware [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2651.157622] env[62684]: DEBUG nova.virt.hardware [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2651.157826] env[62684]: DEBUG nova.virt.hardware [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 2651.157990] env[62684]: DEBUG nova.virt.hardware [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2651.158253] env[62684]: DEBUG nova.virt.hardware [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2651.158432] env[62684]: DEBUG nova.virt.hardware [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2651.158611] env[62684]: DEBUG nova.virt.hardware [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2651.159512] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4b655e3-15c2-45b8-89c2-74821a4613b5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2651.167809] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b6d145c-6852-4419-b518-d675f6f0a69a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2651.289723] env[62684]: DEBUG nova.compute.manager [req-14d66737-32a5-48dd-ae2a-a18b358a3e08 req-3d9d80ef-8e83-4914-bd74-a138c22daf09 service nova] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Received event network-vif-plugged-b2731d5e-8639-4c22-bcd9-a86b875143cd {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2651.290070] env[62684]: DEBUG oslo_concurrency.lockutils [req-14d66737-32a5-48dd-ae2a-a18b358a3e08 req-3d9d80ef-8e83-4914-bd74-a138c22daf09 service nova] Acquiring lock "d338d036-f969-41d4-8986-62b043e5ad2f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2651.290183] env[62684]: DEBUG oslo_concurrency.lockutils [req-14d66737-32a5-48dd-ae2a-a18b358a3e08 req-3d9d80ef-8e83-4914-bd74-a138c22daf09 service nova] Lock "d338d036-f969-41d4-8986-62b043e5ad2f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2651.290359] env[62684]: DEBUG oslo_concurrency.lockutils [req-14d66737-32a5-48dd-ae2a-a18b358a3e08 req-3d9d80ef-8e83-4914-bd74-a138c22daf09 service nova] Lock "d338d036-f969-41d4-8986-62b043e5ad2f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2651.290529] env[62684]: 
DEBUG nova.compute.manager [req-14d66737-32a5-48dd-ae2a-a18b358a3e08 req-3d9d80ef-8e83-4914-bd74-a138c22daf09 service nova] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] No waiting events found dispatching network-vif-plugged-b2731d5e-8639-4c22-bcd9-a86b875143cd {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2651.290707] env[62684]: WARNING nova.compute.manager [req-14d66737-32a5-48dd-ae2a-a18b358a3e08 req-3d9d80ef-8e83-4914-bd74-a138c22daf09 service nova] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Received unexpected event network-vif-plugged-b2731d5e-8639-4c22-bcd9-a86b875143cd for instance with vm_state building and task_state spawning. [ 2651.366268] env[62684]: DEBUG nova.network.neutron [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Successfully updated port: b2731d5e-8639-4c22-bcd9-a86b875143cd {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2651.868900] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "refresh_cache-d338d036-f969-41d4-8986-62b043e5ad2f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2651.869067] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquired lock "refresh_cache-d338d036-f969-41d4-8986-62b043e5ad2f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2651.870040] env[62684]: DEBUG nova.network.neutron [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2652.401016] env[62684]: DEBUG nova.network.neutron [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2652.523220] env[62684]: DEBUG nova.network.neutron [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Updating instance_info_cache with network_info: [{"id": "b2731d5e-8639-4c22-bcd9-a86b875143cd", "address": "fa:16:3e:0a:5b:88", "network": {"id": "93f58a85-0f77-4adc-88c5-bee1aa383535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1446072689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21534190adb0460e9a74363ae059a59d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2731d5e-86", "ovs_interfaceid": "b2731d5e-8639-4c22-bcd9-a86b875143cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2653.026608] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Releasing lock "refresh_cache-d338d036-f969-41d4-8986-62b043e5ad2f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2653.026963] env[62684]: DEBUG nova.compute.manager [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Instance network_info: |[{"id": "b2731d5e-8639-4c22-bcd9-a86b875143cd", "address": "fa:16:3e:0a:5b:88", "network": {"id": "93f58a85-0f77-4adc-88c5-bee1aa383535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1446072689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21534190adb0460e9a74363ae059a59d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2731d5e-86", "ovs_interfaceid": "b2731d5e-8639-4c22-bcd9-a86b875143cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 2653.027470] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:5b:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7874ee7f-20c7-4bd8-a750-ed489e9acc65', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b2731d5e-8639-4c22-bcd9-a86b875143cd', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2653.035081] env[62684]: DEBUG oslo.service.loopingcall [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2653.035314] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2653.035563] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eec7bee3-8e3a-407c-8b6c-9313169802f6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2653.056182] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2653.056182] env[62684]: value = "task-2053973" [ 2653.056182] env[62684]: _type = "Task" [ 2653.056182] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2653.063812] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053973, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2653.314866] env[62684]: DEBUG nova.compute.manager [req-b4cf6bf2-100d-4228-8f74-c85d4f55ba4c req-73142420-642b-4dc0-b346-ae9cda56df5f service nova] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Received event network-changed-b2731d5e-8639-4c22-bcd9-a86b875143cd {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2653.315165] env[62684]: DEBUG nova.compute.manager [req-b4cf6bf2-100d-4228-8f74-c85d4f55ba4c req-73142420-642b-4dc0-b346-ae9cda56df5f service nova] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Refreshing instance network info cache due to event network-changed-b2731d5e-8639-4c22-bcd9-a86b875143cd. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2653.315395] env[62684]: DEBUG oslo_concurrency.lockutils [req-b4cf6bf2-100d-4228-8f74-c85d4f55ba4c req-73142420-642b-4dc0-b346-ae9cda56df5f service nova] Acquiring lock "refresh_cache-d338d036-f969-41d4-8986-62b043e5ad2f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2653.315573] env[62684]: DEBUG oslo_concurrency.lockutils [req-b4cf6bf2-100d-4228-8f74-c85d4f55ba4c req-73142420-642b-4dc0-b346-ae9cda56df5f service nova] Acquired lock "refresh_cache-d338d036-f969-41d4-8986-62b043e5ad2f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2653.315761] env[62684]: DEBUG nova.network.neutron [req-b4cf6bf2-100d-4228-8f74-c85d4f55ba4c req-73142420-642b-4dc0-b346-ae9cda56df5f service nova] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Refreshing network info cache for port b2731d5e-8639-4c22-bcd9-a86b875143cd {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2653.566430] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2053973, 'name': CreateVM_Task, 'duration_secs': 0.425677} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2653.566804] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2653.567296] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2653.567470] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2653.567817] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2653.568087] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8af1e88b-a8a0-4229-8240-e136de8485db {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2653.572875] env[62684]: DEBUG oslo_vmware.api [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2653.572875] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52abd202-18cd-018c-ed79-b14f646e878a" [ 2653.572875] env[62684]: _type = "Task" [ 2653.572875] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2653.580125] env[62684]: DEBUG oslo_vmware.api [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52abd202-18cd-018c-ed79-b14f646e878a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2654.015230] env[62684]: DEBUG nova.network.neutron [req-b4cf6bf2-100d-4228-8f74-c85d4f55ba4c req-73142420-642b-4dc0-b346-ae9cda56df5f service nova] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Updated VIF entry in instance network info cache for port b2731d5e-8639-4c22-bcd9-a86b875143cd. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2654.015613] env[62684]: DEBUG nova.network.neutron [req-b4cf6bf2-100d-4228-8f74-c85d4f55ba4c req-73142420-642b-4dc0-b346-ae9cda56df5f service nova] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Updating instance_info_cache with network_info: [{"id": "b2731d5e-8639-4c22-bcd9-a86b875143cd", "address": "fa:16:3e:0a:5b:88", "network": {"id": "93f58a85-0f77-4adc-88c5-bee1aa383535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1446072689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21534190adb0460e9a74363ae059a59d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2731d5e-86", "ovs_interfaceid": "b2731d5e-8639-4c22-bcd9-a86b875143cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2654.083312] env[62684]: DEBUG oslo_vmware.api [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52abd202-18cd-018c-ed79-b14f646e878a, 'name': SearchDatastore_Task, 'duration_secs': 0.009957} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2654.083633] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2654.083875] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2654.084180] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2654.084340] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2654.084519] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2654.084779] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4316b0fd-608e-4b41-811c-efca79e62725 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2654.093243] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2654.093453] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2654.094177] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f3c0fab-0adf-48a4-8bf4-564ae7a2d2ba {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2654.099123] env[62684]: DEBUG oslo_vmware.api [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2654.099123] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520dcc37-68c5-159d-b3bf-06ef1b3d7741" [ 2654.099123] env[62684]: _type = "Task" [ 2654.099123] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2654.106183] env[62684]: DEBUG oslo_vmware.api [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520dcc37-68c5-159d-b3bf-06ef1b3d7741, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2654.518341] env[62684]: DEBUG oslo_concurrency.lockutils [req-b4cf6bf2-100d-4228-8f74-c85d4f55ba4c req-73142420-642b-4dc0-b346-ae9cda56df5f service nova] Releasing lock "refresh_cache-d338d036-f969-41d4-8986-62b043e5ad2f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2654.610015] env[62684]: DEBUG oslo_vmware.api [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520dcc37-68c5-159d-b3bf-06ef1b3d7741, 'name': SearchDatastore_Task, 'duration_secs': 0.008501} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2654.610801] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59def3e4-aded-4ca7-82f5-978752842464 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2654.616152] env[62684]: DEBUG oslo_vmware.api [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2654.616152] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]528c56af-8eb0-5597-526d-c310dfe6e9a7" [ 2654.616152] env[62684]: _type = "Task" [ 2654.616152] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2654.623624] env[62684]: DEBUG oslo_vmware.api [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]528c56af-8eb0-5597-526d-c310dfe6e9a7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2655.126088] env[62684]: DEBUG oslo_vmware.api [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]528c56af-8eb0-5597-526d-c310dfe6e9a7, 'name': SearchDatastore_Task, 'duration_secs': 0.009179} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2655.126366] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2655.126642] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] d338d036-f969-41d4-8986-62b043e5ad2f/d338d036-f969-41d4-8986-62b043e5ad2f.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2655.126904] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d6028c5e-64a4-4ed2-aeee-8e672a84860d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2655.133212] env[62684]: DEBUG oslo_vmware.api [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2655.133212] env[62684]: value = "task-2053974" [ 2655.133212] env[62684]: _type = "Task" [ 2655.133212] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2655.140872] env[62684]: DEBUG oslo_vmware.api [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053974, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2655.643170] env[62684]: DEBUG oslo_vmware.api [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053974, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.43395} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2655.643560] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] d338d036-f969-41d4-8986-62b043e5ad2f/d338d036-f969-41d4-8986-62b043e5ad2f.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2655.643644] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2655.643890] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7c50e3cf-079a-4b44-a30f-ef99814b0e91 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2655.650629] env[62684]: DEBUG oslo_vmware.api [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2655.650629] env[62684]: value = "task-2053975" [ 2655.650629] env[62684]: _type = "Task" [ 2655.650629] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2655.658557] env[62684]: DEBUG oslo_vmware.api [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053975, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2656.160357] env[62684]: DEBUG oslo_vmware.api [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053975, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.054331} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2656.160645] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2656.161420] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ba98db6-d00c-416f-a5f6-122f90fcf811 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2656.182566] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] d338d036-f969-41d4-8986-62b043e5ad2f/d338d036-f969-41d4-8986-62b043e5ad2f.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2656.182809] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6c169e7-2ad3-45c4-9440-b17aac1a9061 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2656.202383] env[62684]: DEBUG oslo_vmware.api [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2656.202383] env[62684]: value = "task-2053976" [ 2656.202383] env[62684]: _type = "Task" [ 2656.202383] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2656.210123] env[62684]: DEBUG oslo_vmware.api [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053976, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2656.712839] env[62684]: DEBUG oslo_vmware.api [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053976, 'name': ReconfigVM_Task, 'duration_secs': 0.2575} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2656.713280] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Reconfigured VM instance instance-0000007b to attach disk [datastore1] d338d036-f969-41d4-8986-62b043e5ad2f/d338d036-f969-41d4-8986-62b043e5ad2f.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2656.713855] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d90b9c31-f21f-4e32-bb18-d971a2b939e7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2656.720147] env[62684]: DEBUG oslo_vmware.api [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2656.720147] env[62684]: value = "task-2053977" [ 2656.720147] env[62684]: _type = "Task" [ 2656.720147] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2656.728013] env[62684]: DEBUG oslo_vmware.api [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053977, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2657.230537] env[62684]: DEBUG oslo_vmware.api [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053977, 'name': Rename_Task, 'duration_secs': 0.149758} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2657.230818] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2657.231095] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ac7f4f55-6e2c-400c-96ce-57f98f5cdf36 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2657.237415] env[62684]: DEBUG oslo_vmware.api [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2657.237415] env[62684]: value = "task-2053978" [ 2657.237415] env[62684]: _type = "Task" [ 2657.237415] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2657.244577] env[62684]: DEBUG oslo_vmware.api [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053978, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2657.748116] env[62684]: DEBUG oslo_vmware.api [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053978, 'name': PowerOnVM_Task, 'duration_secs': 0.429169} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2657.748484] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2657.748582] env[62684]: INFO nova.compute.manager [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Took 6.62 seconds to spawn the instance on the hypervisor. [ 2657.748736] env[62684]: DEBUG nova.compute.manager [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2657.749517] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b231d4a-9489-40ce-8b28-320edd08aced {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2658.267909] env[62684]: INFO nova.compute.manager [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Took 11.28 seconds to build instance. [ 2658.770353] env[62684]: DEBUG oslo_concurrency.lockutils [None req-1105467e-5f69-4029-aaa3-1320fbe6bdc6 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "d338d036-f969-41d4-8986-62b043e5ad2f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.784s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2658.969314] env[62684]: DEBUG nova.compute.manager [req-c407181e-60f1-44b4-be9d-c50b3dd59027 req-93caafc4-d9f8-48f6-91c2-a87ec2689153 service nova] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Received event network-changed-b2731d5e-8639-4c22-bcd9-a86b875143cd {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2658.969558] env[62684]: DEBUG nova.compute.manager [req-c407181e-60f1-44b4-be9d-c50b3dd59027 req-93caafc4-d9f8-48f6-91c2-a87ec2689153 service nova] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Refreshing instance network info cache due to event network-changed-b2731d5e-8639-4c22-bcd9-a86b875143cd. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2658.969781] env[62684]: DEBUG oslo_concurrency.lockutils [req-c407181e-60f1-44b4-be9d-c50b3dd59027 req-93caafc4-d9f8-48f6-91c2-a87ec2689153 service nova] Acquiring lock "refresh_cache-d338d036-f969-41d4-8986-62b043e5ad2f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2658.969931] env[62684]: DEBUG oslo_concurrency.lockutils [req-c407181e-60f1-44b4-be9d-c50b3dd59027 req-93caafc4-d9f8-48f6-91c2-a87ec2689153 service nova] Acquired lock "refresh_cache-d338d036-f969-41d4-8986-62b043e5ad2f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2658.970113] env[62684]: DEBUG nova.network.neutron [req-c407181e-60f1-44b4-be9d-c50b3dd59027 req-93caafc4-d9f8-48f6-91c2-a87ec2689153 service nova] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Refreshing network info cache for port b2731d5e-8639-4c22-bcd9-a86b875143cd {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2659.669382] env[62684]: DEBUG nova.network.neutron [req-c407181e-60f1-44b4-be9d-c50b3dd59027 req-93caafc4-d9f8-48f6-91c2-a87ec2689153 service nova] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Updated VIF entry in instance network info cache for port b2731d5e-8639-4c22-bcd9-a86b875143cd. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2659.669770] env[62684]: DEBUG nova.network.neutron [req-c407181e-60f1-44b4-be9d-c50b3dd59027 req-93caafc4-d9f8-48f6-91c2-a87ec2689153 service nova] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Updating instance_info_cache with network_info: [{"id": "b2731d5e-8639-4c22-bcd9-a86b875143cd", "address": "fa:16:3e:0a:5b:88", "network": {"id": "93f58a85-0f77-4adc-88c5-bee1aa383535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1446072689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21534190adb0460e9a74363ae059a59d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2731d5e-86", "ovs_interfaceid": "b2731d5e-8639-4c22-bcd9-a86b875143cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2660.173544] env[62684]: DEBUG oslo_concurrency.lockutils [req-c407181e-60f1-44b4-be9d-c50b3dd59027 req-93caafc4-d9f8-48f6-91c2-a87ec2689153 service nova] Releasing lock "refresh_cache-d338d036-f969-41d4-8986-62b043e5ad2f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2683.300751] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic 
task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2683.803233] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2683.803508] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2683.803634] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2683.803793] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2683.804767] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ff34929-515b-47d6-87ee-339112f0615b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2683.812790] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fb6889d-e11d-4f70-8935-e541282028d1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2683.826535] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-719f4415-6de0-4a83-87f1-bf3b453acd9a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2683.832348] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f51de1c-63e4-4764-b3c5-f15335a00a26 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2683.859980] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181345MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2683.860125] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2683.860305] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2684.885409] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance d338d036-f969-41d4-8986-62b043e5ad2f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2684.885698] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2684.885879] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2684.912370] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80740946-106a-43e6-aa03-a5a7df10c845 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2684.919799] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b29275c2-9607-4c15-8492-86b87ef47801 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2684.948295] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5f054a3-9db9-48f5-9d94-2ad72be05091 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2684.955501] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-224f64d0-e9c5-4a18-8554-9bb65d37d67e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2684.967954] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2685.470865] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2685.975580] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2685.975983] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.115s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2687.977118] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2687.977118] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2687.977118] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the list of instances to heal {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2688.507361] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "refresh_cache-d338d036-f969-41d4-8986-62b043e5ad2f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2688.507508] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "refresh_cache-d338d036-f969-41d4-8986-62b043e5ad2f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2688.507656] env[62684]: DEBUG nova.network.neutron [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Forcefully refreshing network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2688.507810] env[62684]: DEBUG nova.objects.instance [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lazy-loading 'info_cache' on Instance uuid d338d036-f969-41d4-8986-62b043e5ad2f {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2690.216206] env[62684]: DEBUG nova.network.neutron [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Updating instance_info_cache with network_info: [{"id": "b2731d5e-8639-4c22-bcd9-a86b875143cd", "address": "fa:16:3e:0a:5b:88", "network": {"id": "93f58a85-0f77-4adc-88c5-bee1aa383535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1446072689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21534190adb0460e9a74363ae059a59d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2731d5e-86", "ovs_interfaceid": "b2731d5e-8639-4c22-bcd9-a86b875143cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2690.719233] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "refresh_cache-d338d036-f969-41d4-8986-62b043e5ad2f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2690.719482] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Updated the network info_cache for instance {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2690.719656] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2690.719824] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2690.719972] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2690.720130] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2690.720273] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2690.720398] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2690.720551] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2690.720670] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Cleaning up deleted instances {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 2691.227610] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] There are 13 instances to clean {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 2691.228163] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 0c7a3cd8-b42a-4111-bcfd-8fcd15b51028] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2691.731601] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 84773cfa-edee-44bc-b89d-490d1fef5417] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2692.234571] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 4081f322-a854-475a-9a66-3d573128f39d] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2692.738218] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 603b2c96-44f1-45a7-8209-b799662a3e42] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2693.242173] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 8d22d555-f837-4eb3-9474-c1434649584e] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2693.746201] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 7feaf3cf-85c1-462f-bfd5-1ed4f3e6b51d] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2694.249418] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: d540b43f-5bf9-47df-b319-97a1bae7ffc0] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2694.732117] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c01faacb-ca0b-4b31-b767-f067aafe4583 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "d338d036-f969-41d4-8986-62b043e5ad2f" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2694.732397] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c01faacb-ca0b-4b31-b767-f067aafe4583 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "d338d036-f969-41d4-8986-62b043e5ad2f" acquired by 
"nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2694.753129] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: ea4d273e-e2a7-4c4c-9afb-67fc3f5682a5] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2695.235252] env[62684]: DEBUG nova.compute.utils [None req-c01faacb-ca0b-4b31-b767-f067aafe4583 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2695.255477] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: b0ddbec0-d578-46df-93fd-9d38c939bd77] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2695.738569] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c01faacb-ca0b-4b31-b767-f067aafe4583 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "d338d036-f969-41d4-8986-62b043e5ad2f" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2695.757781] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: f7b61e23-fe0d-41fb-9100-d07cd8cb2d04] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2696.260485] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: b88d9418-7e90-473e-bd9a-18bc398faad0] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2696.763774] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 5b3668f3-219d-4304-bc9e-9b911762085d] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2696.802239] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c01faacb-ca0b-4b31-b767-f067aafe4583 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "d338d036-f969-41d4-8986-62b043e5ad2f" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2696.802492] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c01faacb-ca0b-4b31-b767-f067aafe4583 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "d338d036-f969-41d4-8986-62b043e5ad2f" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2696.802763] env[62684]: INFO nova.compute.manager [None req-c01faacb-ca0b-4b31-b767-f067aafe4583 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] 
Attaching volume ff65c5b8-13bd-4a60-ab40-fbe61d0e4057 to /dev/sdb [ 2696.832447] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f76aae16-87dc-4f41-abd8-11eedbcf454e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2696.839378] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-985d0243-9675-461b-b3f0-d567299a8780 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2696.852926] env[62684]: DEBUG nova.virt.block_device [None req-c01faacb-ca0b-4b31-b767-f067aafe4583 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Updating existing volume attachment record: c13d859a-39c5-4174-af8b-b9d13369ebc4 {{(pid=62684) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2697.267583] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 0156d807-1ab4-482f-91d1-172bf32bf23c] Instance has had 0 of 5 cleanup attempts {{(pid=62684) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2700.090381] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2700.090782] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2701.394790] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-c01faacb-ca0b-4b31-b767-f067aafe4583 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Volume attach. 
Driver type: vmdk {{(pid=62684) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2701.395070] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-c01faacb-ca0b-4b31-b767-f067aafe4583 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421443', 'volume_id': 'ff65c5b8-13bd-4a60-ab40-fbe61d0e4057', 'name': 'volume-ff65c5b8-13bd-4a60-ab40-fbe61d0e4057', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd338d036-f969-41d4-8986-62b043e5ad2f', 'attached_at': '', 'detached_at': '', 'volume_id': 'ff65c5b8-13bd-4a60-ab40-fbe61d0e4057', 'serial': 'ff65c5b8-13bd-4a60-ab40-fbe61d0e4057'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2701.396289] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb85e0cc-13e0-4d8b-ace1-7ba75c8e8db1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2701.412491] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f01358ef-f3a4-4691-94ba-abefb374302d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2701.436855] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-c01faacb-ca0b-4b31-b767-f067aafe4583 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] volume-ff65c5b8-13bd-4a60-ab40-fbe61d0e4057/volume-ff65c5b8-13bd-4a60-ab40-fbe61d0e4057.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2701.437120] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7bcabe2f-fef2-4fa0-a769-951ed6cb13ea {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2701.453979] env[62684]: DEBUG oslo_vmware.api [None req-c01faacb-ca0b-4b31-b767-f067aafe4583 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2701.453979] env[62684]: value = "task-2053983" [ 2701.453979] env[62684]: _type = "Task" [ 2701.453979] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2701.461217] env[62684]: DEBUG oslo_vmware.api [None req-c01faacb-ca0b-4b31-b767-f067aafe4583 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053983, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2701.963558] env[62684]: DEBUG oslo_vmware.api [None req-c01faacb-ca0b-4b31-b767-f067aafe4583 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053983, 'name': ReconfigVM_Task, 'duration_secs': 0.319684} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2701.963830] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-c01faacb-ca0b-4b31-b767-f067aafe4583 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Reconfigured VM instance instance-0000007b to attach disk [datastore1] volume-ff65c5b8-13bd-4a60-ab40-fbe61d0e4057/volume-ff65c5b8-13bd-4a60-ab40-fbe61d0e4057.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2701.968512] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32720883-b2e4-4bb0-9c35-4ed50dc7ce3e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2701.983427] env[62684]: DEBUG oslo_vmware.api [None req-c01faacb-ca0b-4b31-b767-f067aafe4583 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2701.983427] env[62684]: value = "task-2053984" [ 2701.983427] env[62684]: _type = "Task" [ 2701.983427] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2701.990995] env[62684]: DEBUG oslo_vmware.api [None req-c01faacb-ca0b-4b31-b767-f067aafe4583 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053984, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2702.494029] env[62684]: DEBUG oslo_vmware.api [None req-c01faacb-ca0b-4b31-b767-f067aafe4583 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053984, 'name': ReconfigVM_Task, 'duration_secs': 0.133207} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2702.494029] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-c01faacb-ca0b-4b31-b767-f067aafe4583 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421443', 'volume_id': 'ff65c5b8-13bd-4a60-ab40-fbe61d0e4057', 'name': 'volume-ff65c5b8-13bd-4a60-ab40-fbe61d0e4057', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd338d036-f969-41d4-8986-62b043e5ad2f', 'attached_at': '', 'detached_at': '', 'volume_id': 'ff65c5b8-13bd-4a60-ab40-fbe61d0e4057', 'serial': 'ff65c5b8-13bd-4a60-ab40-fbe61d0e4057'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2703.300636] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2703.528807] env[62684]: DEBUG nova.objects.instance [None req-c01faacb-ca0b-4b31-b767-f067aafe4583 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lazy-loading 'flavor' on Instance uuid d338d036-f969-41d4-8986-62b043e5ad2f {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2704.033828] env[62684]: DEBUG oslo_concurrency.lockutils [None req-c01faacb-ca0b-4b31-b767-f067aafe4583 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "d338d036-f969-41d4-8986-62b043e5ad2f" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.231s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2704.904038] env[62684]: DEBUG nova.compute.manager [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Stashing vm_state: active {{(pid=62684) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 2705.423601] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2705.423890] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2705.929592] env[62684]: INFO nova.compute.claims [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] 
Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2706.436656] env[62684]: INFO nova.compute.resource_tracker [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Updating resource usage from migration 1c62b14f-25a3-44a9-8ebe-c27b0b3cf92e [ 2706.472994] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-587fc545-91df-4734-88ed-86b3723f59ab {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2706.481060] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26e2500a-a92a-4637-827c-6b964503082b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2706.510299] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794bcc81-ebf2-4159-b230-f6d9225a5515 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2706.517296] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-914e5c57-5fc5-47f9-aa40-46b4aa43c425 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2706.529928] env[62684]: DEBUG nova.compute.provider_tree [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2707.034116] env[62684]: DEBUG nova.scheduler.client.report [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2707.540736] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.117s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2707.541090] env[62684]: INFO nova.compute.manager [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Migrating [ 2708.057743] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring 
lock "refresh_cache-d338d036-f969-41d4-8986-62b043e5ad2f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2708.058261] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquired lock "refresh_cache-d338d036-f969-41d4-8986-62b043e5ad2f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2708.058261] env[62684]: DEBUG nova.network.neutron [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2708.764488] env[62684]: DEBUG nova.network.neutron [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Updating instance_info_cache with network_info: [{"id": "b2731d5e-8639-4c22-bcd9-a86b875143cd", "address": "fa:16:3e:0a:5b:88", "network": {"id": "93f58a85-0f77-4adc-88c5-bee1aa383535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1446072689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21534190adb0460e9a74363ae059a59d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2731d5e-86", "ovs_interfaceid": "b2731d5e-8639-4c22-bcd9-a86b875143cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2709.267319] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Releasing lock "refresh_cache-d338d036-f969-41d4-8986-62b043e5ad2f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2710.783562] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba78231-6f9b-4245-a717-9d72689678a6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2710.804266] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Updating instance 'd338d036-f969-41d4-8986-62b043e5ad2f' progress to 
0 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2711.310456] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2711.310790] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b9f75c9c-1d3f-4abe-8152-91aed2451fc6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2711.318660] env[62684]: DEBUG oslo_vmware.api [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2711.318660] env[62684]: value = "task-2053985" [ 2711.318660] env[62684]: _type = "Task" [ 2711.318660] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2711.326318] env[62684]: DEBUG oslo_vmware.api [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053985, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2711.828672] env[62684]: DEBUG oslo_vmware.api [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053985, 'name': PowerOffVM_Task, 'duration_secs': 0.175468} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2711.829086] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2711.829197] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Updating instance 'd338d036-f969-41d4-8986-62b043e5ad2f' progress to 17 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2712.336222] env[62684]: DEBUG nova.virt.hardware [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2712.336500] env[62684]: DEBUG nova.virt.hardware [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2712.336624] env[62684]: DEBUG nova.virt.hardware [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2712.336821] env[62684]: DEBUG nova.virt.hardware [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2712.336978] env[62684]: DEBUG nova.virt.hardware [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2712.337149] env[62684]: DEBUG nova.virt.hardware [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2712.337365] env[62684]: DEBUG nova.virt.hardware [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 
tempest-ServerActionsTestOtherB-1953174230-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2712.337529] env[62684]: DEBUG nova.virt.hardware [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2712.337701] env[62684]: DEBUG nova.virt.hardware [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2712.337872] env[62684]: DEBUG nova.virt.hardware [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2712.338067] env[62684]: DEBUG nova.virt.hardware [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2712.343079] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17481165-d4a9-484a-866a-a4c5508dd980 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2712.359544] env[62684]: DEBUG oslo_vmware.api [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2712.359544] env[62684]: value = "task-2053986" [ 2712.359544] env[62684]: _type = "Task" [ 2712.359544] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2712.368978] env[62684]: DEBUG oslo_vmware.api [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053986, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2712.872419] env[62684]: DEBUG oslo_vmware.api [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053986, 'name': ReconfigVM_Task, 'duration_secs': 0.185091} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2712.872877] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Updating instance 'd338d036-f969-41d4-8986-62b043e5ad2f' progress to 33 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2713.381502] env[62684]: DEBUG nova.virt.hardware [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2713.381828] env[62684]: DEBUG nova.virt.hardware [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2713.381935] env[62684]: DEBUG nova.virt.hardware [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2713.382113] env[62684]: DEBUG nova.virt.hardware [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2713.382279] env[62684]: DEBUG nova.virt.hardware [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2713.382433] env[62684]: DEBUG nova.virt.hardware [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2713.382641] env[62684]: DEBUG nova.virt.hardware [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2713.382805] env[62684]: DEBUG nova.virt.hardware [None 
req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2713.382978] env[62684]: DEBUG nova.virt.hardware [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2713.383164] env[62684]: DEBUG nova.virt.hardware [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2713.383344] env[62684]: DEBUG nova.virt.hardware [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2713.388645] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Reconfiguring VM instance instance-0000007b to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2713.388928] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a1de93da-8e1f-4626-a534-cc4a9c9f5834 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2713.407727] env[62684]: DEBUG oslo_vmware.api [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2713.407727] env[62684]: value = "task-2053987" [ 2713.407727] env[62684]: _type = "Task" [ 2713.407727] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2713.414966] env[62684]: DEBUG oslo_vmware.api [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053987, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2713.917659] env[62684]: DEBUG oslo_vmware.api [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053987, 'name': ReconfigVM_Task, 'duration_secs': 0.174528} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2713.918067] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Reconfigured VM instance instance-0000007b to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2713.918698] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be15ac75-e8cc-469e-99fc-960803af2d87 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2713.942567] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] d338d036-f969-41d4-8986-62b043e5ad2f/d338d036-f969-41d4-8986-62b043e5ad2f.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2713.943114] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-095b31b3-fc87-4634-ae47-645f2089cc6f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2713.960467] env[62684]: DEBUG oslo_vmware.api [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2713.960467] env[62684]: value = "task-2053988" [ 2713.960467] env[62684]: _type = "Task" [ 2713.960467] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2713.967757] env[62684]: DEBUG oslo_vmware.api [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053988, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2714.470036] env[62684]: DEBUG oslo_vmware.api [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053988, 'name': ReconfigVM_Task, 'duration_secs': 0.282207} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2714.470325] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Reconfigured VM instance instance-0000007b to attach disk [datastore1] d338d036-f969-41d4-8986-62b043e5ad2f/d338d036-f969-41d4-8986-62b043e5ad2f.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2714.470609] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Updating instance 'd338d036-f969-41d4-8986-62b043e5ad2f' progress to 50 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2714.803735] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2714.803954] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Cleaning up deleted instances with incomplete migration {{(pid=62684) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 2714.977491] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c9a291a-9691-46eb-b0fe-a74f46de4b26 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2714.999958] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c762ce3-6568-428f-932a-2869ba9ce547 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2715.021246] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Updating instance 'd338d036-f969-41d4-8986-62b043e5ad2f' progress to 67 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2716.656438] env[62684]: DEBUG nova.network.neutron [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Port b2731d5e-8639-4c22-bcd9-a86b875143cd binding to destination host cpu-1 is already ACTIVE {{(pid=62684) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2717.678407] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "d338d036-f969-41d4-8986-62b043e5ad2f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2717.678834] env[62684]: DEBUG oslo_concurrency.lockutils [None 
req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "d338d036-f969-41d4-8986-62b043e5ad2f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2717.678834] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "d338d036-f969-41d4-8986-62b043e5ad2f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2718.713798] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "refresh_cache-d338d036-f969-41d4-8986-62b043e5ad2f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2718.714037] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquired lock "refresh_cache-d338d036-f969-41d4-8986-62b043e5ad2f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2718.714202] env[62684]: DEBUG nova.network.neutron [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2719.426069] env[62684]: DEBUG nova.network.neutron [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Updating instance_info_cache with network_info: [{"id": "b2731d5e-8639-4c22-bcd9-a86b875143cd", "address": "fa:16:3e:0a:5b:88", "network": {"id": "93f58a85-0f77-4adc-88c5-bee1aa383535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1446072689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21534190adb0460e9a74363ae059a59d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2731d5e-86", "ovs_interfaceid": "b2731d5e-8639-4c22-bcd9-a86b875143cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2719.928561] env[62684]: DEBUG oslo_concurrency.lockutils [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Releasing lock "refresh_cache-d338d036-f969-41d4-8986-62b043e5ad2f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2720.438017] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54780242-6477-4f80-9f30-50b3529c5ee2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2720.445160] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5be8bb6-d270-4c04-b21d-d184d0892bee {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2721.541644] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-023f4c3c-1fb4-4f22-82f9-e863e8b1a386 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2721.563585] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b40260-ace4-42c8-b4ef-0f54f1dc0ea1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2721.570331] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Updating instance 'd338d036-f969-41d4-8986-62b043e5ad2f' progress to 83 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2722.076622] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2722.076986] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ebb08ce1-596a-49ae-b561-375701e0828e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2722.084006] env[62684]: DEBUG oslo_vmware.api [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2722.084006] env[62684]: value = "task-2053989" [ 2722.084006] env[62684]: _type = "Task" [ 2722.084006] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2722.095104] env[62684]: DEBUG oslo_vmware.api [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053989, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2722.594206] env[62684]: DEBUG oslo_vmware.api [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053989, 'name': PowerOnVM_Task, 'duration_secs': 0.377199} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2722.594622] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2722.594690] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-2a2ea95b-f8bd-4783-bb7c-3456ab91a5c3 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Updating instance 'd338d036-f969-41d4-8986-62b043e5ad2f' progress to 100 {{(pid=62684) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2724.592736] env[62684]: DEBUG nova.network.neutron [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Port b2731d5e-8639-4c22-bcd9-a86b875143cd binding to destination host cpu-1 is already ACTIVE {{(pid=62684) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2724.593082] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "refresh_cache-d338d036-f969-41d4-8986-62b043e5ad2f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2724.593333] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquired lock "refresh_cache-d338d036-f969-41d4-8986-62b043e5ad2f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2724.593449] env[62684]: DEBUG nova.network.neutron [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2725.303322] env[62684]: DEBUG nova.network.neutron [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Updating instance_info_cache with network_info: [{"id": "b2731d5e-8639-4c22-bcd9-a86b875143cd", "address": "fa:16:3e:0a:5b:88", "network": {"id": "93f58a85-0f77-4adc-88c5-bee1aa383535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1446072689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", 
"type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21534190adb0460e9a74363ae059a59d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2731d5e-86", "ovs_interfaceid": "b2731d5e-8639-4c22-bcd9-a86b875143cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2725.806607] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Releasing lock "refresh_cache-d338d036-f969-41d4-8986-62b043e5ad2f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2726.310270] env[62684]: DEBUG nova.compute.manager [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62684) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 2727.406695] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2727.407060] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2727.909627] env[62684]: DEBUG nova.objects.instance [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lazy-loading 'migration_context' on Instance uuid d338d036-f969-41d4-8986-62b043e5ad2f {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2728.459540] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-100fcb67-be43-4509-99f0-a7efe1796a73 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2728.467664] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4465e2ec-1b0b-4263-92af-ab0940fc6ffa {{(pid=62684) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2728.499210] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e487e162-a7b9-48f7-a6c4-036455950668 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2728.507737] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ebfcee9-bfa2-4adb-a349-c4c43f2b67de {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2728.521554] env[62684]: DEBUG nova.compute.provider_tree [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2729.025272] env[62684]: DEBUG nova.scheduler.client.report [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2730.036612] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.629s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2731.570872] env[62684]: INFO nova.compute.manager [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Swapping old allocation on dict_keys(['c23c281e-ec1f-4876-972e-a98655f2084f']) held by migration 1c62b14f-25a3-44a9-8ebe-c27b0b3cf92e for instance [ 2731.592085] env[62684]: DEBUG nova.scheduler.client.report [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Overwriting current allocation {'allocations': {'c23c281e-ec1f-4876-972e-a98655f2084f': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 196}}, 'project_id': '21534190adb0460e9a74363ae059a59d', 'user_id': '22544927e67845a69c8ac324918f2e93', 'consumer_generation': 1} on consumer d338d036-f969-41d4-8986-62b043e5ad2f {{(pid=62684) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 2731.668134] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "refresh_cache-d338d036-f969-41d4-8986-62b043e5ad2f" {{(pid=62684) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2731.668346] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquired lock "refresh_cache-d338d036-f969-41d4-8986-62b043e5ad2f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2731.668530] env[62684]: DEBUG nova.network.neutron [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2732.384440] env[62684]: DEBUG nova.network.neutron [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Updating instance_info_cache with network_info: [{"id": "b2731d5e-8639-4c22-bcd9-a86b875143cd", "address": "fa:16:3e:0a:5b:88", "network": {"id": "93f58a85-0f77-4adc-88c5-bee1aa383535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1446072689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21534190adb0460e9a74363ae059a59d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2731d5e-86", "ovs_interfaceid": "b2731d5e-8639-4c22-bcd9-a86b875143cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2732.887763] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Releasing lock "refresh_cache-d338d036-f969-41d4-8986-62b043e5ad2f" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2732.890181] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d699ca34-9284-4f19-81ea-4da5ae137da2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2732.897316] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc3d02b6-2d61-4273-9c45-109fb1b02bc6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2733.978294] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d 
tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2733.978641] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1143e4da-a3a8-4b6f-967d-9678f783a812 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2733.988087] env[62684]: DEBUG oslo_vmware.api [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2733.988087] env[62684]: value = "task-2053990" [ 2733.988087] env[62684]: _type = "Task" [ 2733.988087] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2733.995997] env[62684]: DEBUG oslo_vmware.api [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053990, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2734.498145] env[62684]: DEBUG oslo_vmware.api [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053990, 'name': PowerOffVM_Task, 'duration_secs': 0.20124} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2734.498422] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2734.499072] env[62684]: DEBUG nova.virt.hardware [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2734.499305] env[62684]: DEBUG nova.virt.hardware [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2734.499466] env[62684]: DEBUG nova.virt.hardware [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] 
Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2734.499654] env[62684]: DEBUG nova.virt.hardware [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2734.499806] env[62684]: DEBUG nova.virt.hardware [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2734.499958] env[62684]: DEBUG nova.virt.hardware [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2734.500182] env[62684]: DEBUG nova.virt.hardware [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2734.500349] env[62684]: DEBUG nova.virt.hardware [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2734.500521] env[62684]: DEBUG nova.virt.hardware [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2734.500689] env[62684]: DEBUG nova.virt.hardware [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2734.500872] env[62684]: DEBUG nova.virt.hardware [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2734.505695] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-534f1f65-0589-49a8-8dc2-51f5ef8ce241 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2734.520301] env[62684]: DEBUG oslo_vmware.api [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2734.520301] env[62684]: value = "task-2053991" [ 2734.520301] env[62684]: _type = "Task" [ 2734.520301] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2734.527955] env[62684]: DEBUG oslo_vmware.api [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053991, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2735.030684] env[62684]: DEBUG oslo_vmware.api [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053991, 'name': ReconfigVM_Task, 'duration_secs': 0.141248} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2735.031487] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ff5212-7086-49f8-ac59-a69ac2d899e3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2735.052031] env[62684]: DEBUG nova.virt.hardware [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2735.052303] env[62684]: DEBUG nova.virt.hardware [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2735.052471] env[62684]: DEBUG nova.virt.hardware [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2735.052665] env[62684]: DEBUG nova.virt.hardware [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2735.052817] env[62684]: DEBUG nova.virt.hardware [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2735.053231] env[62684]: DEBUG nova.virt.hardware [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2735.053231] env[62684]: DEBUG nova.virt.hardware [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2735.053358] env[62684]: DEBUG nova.virt.hardware [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2735.053511] env[62684]: DEBUG nova.virt.hardware [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2735.053680] env[62684]: DEBUG nova.virt.hardware [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2735.053860] env[62684]: DEBUG nova.virt.hardware [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2735.054634] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca0ec01b-1702-4c90-9e27-f049a81979e9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2735.060072] env[62684]: DEBUG oslo_vmware.api [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2735.060072] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52774575-4939-c3b7-87bf-abcbc7b029fe" [ 2735.060072] env[62684]: _type = "Task" [ 2735.060072] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2735.067465] env[62684]: DEBUG oslo_vmware.api [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52774575-4939-c3b7-87bf-abcbc7b029fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2735.570023] env[62684]: DEBUG oslo_vmware.api [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52774575-4939-c3b7-87bf-abcbc7b029fe, 'name': SearchDatastore_Task, 'duration_secs': 0.009222} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2735.575144] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Reconfiguring VM instance instance-0000007b to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2735.575423] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5f324ef7-cdde-47ab-85d3-e768fdee1225 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2735.593717] env[62684]: DEBUG oslo_vmware.api [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2735.593717] env[62684]: value = "task-2053992" [ 2735.593717] env[62684]: _type = "Task" [ 2735.593717] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2735.601346] env[62684]: DEBUG oslo_vmware.api [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053992, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2736.104390] env[62684]: DEBUG oslo_vmware.api [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053992, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2736.604376] env[62684]: DEBUG oslo_vmware.api [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053992, 'name': ReconfigVM_Task, 'duration_secs': 0.514191} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2736.604658] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Reconfigured VM instance instance-0000007b to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2736.605462] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4a1a755-9d42-4abb-bcc1-a742a046f40f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2736.628784] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] d338d036-f969-41d4-8986-62b043e5ad2f/d338d036-f969-41d4-8986-62b043e5ad2f.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2736.629027] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a22e9c2a-a42f-47cb-bd0a-4d14a46bc185 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2736.645790] env[62684]: DEBUG oslo_vmware.api [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2736.645790] env[62684]: value = "task-2053993" [ 2736.645790] env[62684]: _type = "Task" [ 2736.645790] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2736.652942] env[62684]: DEBUG oslo_vmware.api [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053993, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2737.155197] env[62684]: DEBUG oslo_vmware.api [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053993, 'name': ReconfigVM_Task, 'duration_secs': 0.278042} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2737.155625] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Reconfigured VM instance instance-0000007b to attach disk [datastore1] d338d036-f969-41d4-8986-62b043e5ad2f/d338d036-f969-41d4-8986-62b043e5ad2f.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2737.156369] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f468f93-d002-476e-9279-9b7145bc6066 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2737.176305] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-613b5493-20d5-4f9a-a4bb-1635ef2d2adf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2737.195846] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1b8e96-358d-413a-9e83-32a172646743 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2737.214984] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca12324c-91a5-47e2-856b-b3598f325feb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2737.221198] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2737.221419] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6fc56b17-b223-4be7-a875-4f840f03c606 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2737.227997] env[62684]: DEBUG oslo_vmware.api [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2737.227997] env[62684]: value = "task-2053994" [ 2737.227997] env[62684]: _type = "Task" [ 2737.227997] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2737.234896] env[62684]: DEBUG oslo_vmware.api [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053994, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2737.744774] env[62684]: DEBUG oslo_vmware.api [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053994, 'name': PowerOnVM_Task, 'duration_secs': 0.358472} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2737.745247] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2738.787368] env[62684]: INFO nova.compute.manager [None req-e5d413a9-17cc-4123-a2e8-ac9530b1e64d tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Updating instance to original state: 'active' [ 2740.306603] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "d338d036-f969-41d4-8986-62b043e5ad2f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2740.306971] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "d338d036-f969-41d4-8986-62b043e5ad2f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2740.307101] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "d338d036-f969-41d4-8986-62b043e5ad2f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2740.307316] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "d338d036-f969-41d4-8986-62b043e5ad2f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2740.307496] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "d338d036-f969-41d4-8986-62b043e5ad2f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2740.309632] env[62684]: INFO nova.compute.manager [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Terminating instance [ 2740.311459] env[62684]: DEBUG nova.compute.manager [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 
tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2740.311678] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2740.311919] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6cf26164-f779-4fad-b3ee-445bddd503b5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2740.318870] env[62684]: DEBUG oslo_vmware.api [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2740.318870] env[62684]: value = "task-2053995" [ 2740.318870] env[62684]: _type = "Task" [ 2740.318870] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2740.326784] env[62684]: DEBUG oslo_vmware.api [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053995, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2740.828797] env[62684]: DEBUG oslo_vmware.api [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053995, 'name': PowerOffVM_Task, 'duration_secs': 0.281724} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2740.829121] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2740.829286] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Volume detach. 
Driver type: vmdk {{(pid=62684) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2740.829485] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421443', 'volume_id': 'ff65c5b8-13bd-4a60-ab40-fbe61d0e4057', 'name': 'volume-ff65c5b8-13bd-4a60-ab40-fbe61d0e4057', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'd338d036-f969-41d4-8986-62b043e5ad2f', 'attached_at': '2025-01-10T08:02:53.000000', 'detached_at': '', 'volume_id': 'ff65c5b8-13bd-4a60-ab40-fbe61d0e4057', 'serial': 'ff65c5b8-13bd-4a60-ab40-fbe61d0e4057'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2740.830233] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1d661e0-b937-4f49-9ee3-fff94f2933ff {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2740.851514] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d3d490e-f535-4061-852b-a8651ca21a68 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2740.857799] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff245a5-072d-4ca1-9852-b94884d080e0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2740.877786] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f914311-4c5a-465b-b23b-bf9650cff2e6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2740.891185] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] The volume has not been displaced from its original location: [datastore1] volume-ff65c5b8-13bd-4a60-ab40-fbe61d0e4057/volume-ff65c5b8-13bd-4a60-ab40-fbe61d0e4057.vmdk. No consolidation needed. 
{{(pid=62684) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2740.896261] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Reconfiguring VM instance instance-0000007b to detach disk 2001 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2740.896498] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ec8232a-815c-43df-b730-93bf9f8ebe19 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2740.912725] env[62684]: DEBUG oslo_vmware.api [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2740.912725] env[62684]: value = "task-2053996" [ 2740.912725] env[62684]: _type = "Task" [ 2740.912725] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2740.919722] env[62684]: DEBUG oslo_vmware.api [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053996, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2741.424364] env[62684]: DEBUG oslo_vmware.api [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053996, 'name': ReconfigVM_Task, 'duration_secs': 0.20881} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2741.424755] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Reconfigured VM instance instance-0000007b to detach disk 2001 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2741.429181] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-82a729fc-8a40-4d43-b5a3-6d2ce7920fff {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2741.446501] env[62684]: DEBUG oslo_vmware.api [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2741.446501] env[62684]: value = "task-2053997" [ 2741.446501] env[62684]: _type = "Task" [ 2741.446501] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2741.453742] env[62684]: DEBUG oslo_vmware.api [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053997, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2741.956618] env[62684]: DEBUG oslo_vmware.api [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053997, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2742.456945] env[62684]: DEBUG oslo_vmware.api [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053997, 'name': ReconfigVM_Task, 'duration_secs': 0.747921} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2742.457343] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421443', 'volume_id': 'ff65c5b8-13bd-4a60-ab40-fbe61d0e4057', 'name': 'volume-ff65c5b8-13bd-4a60-ab40-fbe61d0e4057', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'd338d036-f969-41d4-8986-62b043e5ad2f', 'attached_at': '2025-01-10T08:02:53.000000', 'detached_at': '', 'volume_id': 'ff65c5b8-13bd-4a60-ab40-fbe61d0e4057', 'serial': 'ff65c5b8-13bd-4a60-ab40-fbe61d0e4057'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2742.457585] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2742.458349] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c31347e-6fb6-44a5-9d1e-2f0b3e357e63 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2742.464510] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2742.464729] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a262f653-7a06-479c-9a5c-f71fee53f3b5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2743.804894] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2744.308471] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" 
{{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2744.308713] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2744.308875] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2744.309038] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2744.309904] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f10dbac-8d0a-406f-8eb8-26a2660a38c0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2744.318267] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1663383d-d736-41e3-9558-8b7bd88a52e0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2744.331697] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5fc1a9b-b481-4ea2-825c-1835eb723600 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2744.337776] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8cd2ee7-f0a2-4fb0-ac9f-f50959f37f84 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2744.366936] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181154MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2744.367087] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2744.367273] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2745.440591] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance d338d036-f969-41d4-8986-62b043e5ad2f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 
1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2745.440855] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2745.440947] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2745.464390] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7011c6ba-e17c-4687-a240-6501bc1771f6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2745.471503] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e02a696-ed65-4727-a398-6e1d5311e403 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2745.499943] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4674ed64-fccb-4be4-ab3a-decbc405f5f6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2745.506527] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4d8e33f-cb8a-4d2f-a163-c9d63a443932 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2745.519913] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2746.023027] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2746.528152] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2746.528473] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.161s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2747.800576] env[62684]: DEBUG 
nova.virt.vmwareapi.vmops [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2747.801013] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2747.801013] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Deleting the datastore file [datastore1] d338d036-f969-41d4-8986-62b043e5ad2f {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2747.801210] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-953e4aca-b6a4-405b-bbe5-04bf299f583b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2747.808154] env[62684]: DEBUG oslo_vmware.api [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2747.808154] env[62684]: value = "task-2053999" [ 2747.808154] env[62684]: _type = "Task" [ 2747.808154] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2747.815926] env[62684]: DEBUG oslo_vmware.api [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053999, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2748.317844] env[62684]: DEBUG oslo_vmware.api [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2053999, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152139} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2748.318121] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2748.318321] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2748.318502] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2748.318682] env[62684]: INFO nova.compute.manager [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Took 8.01 seconds to destroy the instance on the hypervisor. [ 2748.318925] env[62684]: DEBUG oslo.service.loopingcall [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2748.319135] env[62684]: DEBUG nova.compute.manager [-] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2748.319235] env[62684]: DEBUG nova.network.neutron [-] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2748.927571] env[62684]: DEBUG nova.compute.manager [req-47712098-eb21-4d63-8168-e7b7aa2ca03b req-dd1f6902-1d3f-48ea-97cf-a77e7b3628e1 service nova] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Received event network-vif-deleted-b2731d5e-8639-4c22-bcd9-a86b875143cd {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2748.927828] env[62684]: INFO nova.compute.manager [req-47712098-eb21-4d63-8168-e7b7aa2ca03b req-dd1f6902-1d3f-48ea-97cf-a77e7b3628e1 service nova] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Neutron deleted interface b2731d5e-8639-4c22-bcd9-a86b875143cd; detaching it from the instance and deleting it from the info cache [ 2748.928082] env[62684]: DEBUG nova.network.neutron [req-47712098-eb21-4d63-8168-e7b7aa2ca03b req-dd1f6902-1d3f-48ea-97cf-a77e7b3628e1 service nova] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2749.024469] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2749.024643] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2749.024944] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the list of instances to heal {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2749.400909] env[62684]: DEBUG nova.network.neutron [-] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2749.429975] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-83c304ec-1e7f-448e-afe9-fe88109279e9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2749.440165] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7820236-5169-40e9-9ef0-1e4a5f8d775e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2749.463133] env[62684]: DEBUG nova.compute.manager [req-47712098-eb21-4d63-8168-e7b7aa2ca03b req-dd1f6902-1d3f-48ea-97cf-a77e7b3628e1 service nova] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Detach interface failed, port_id=b2731d5e-8639-4c22-bcd9-a86b875143cd, reason: Instance d338d036-f969-41d4-8986-62b043e5ad2f could not be found. 
{{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2749.527757] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Skipping network cache update for instance because it is being deleted. {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9946}} [ 2749.527921] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Didn't find any instances for network info cache update. {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 2749.528169] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2749.528335] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2749.528489] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2749.903708] env[62684]: INFO nova.compute.manager [-] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Took 1.58 seconds to deallocate network for instance. [ 2750.300890] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2750.301188] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2750.301311] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2750.445333] env[62684]: INFO nova.compute.manager [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: d338d036-f969-41d4-8986-62b043e5ad2f] Took 0.54 seconds to detach 1 volumes for instance. 
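Side note on the resource-tracker audit recorded a little earlier in this stretch (the "Hypervisor/Node resource view" and "Final resource view" entries): the reported numbers are plain bookkeeping, 512 MB of reserved host RAM plus the 192 MB still allocated to instance d338d036-f969-41d4-8986-62b043e5ad2f gives used_ram=704MB, and the instance's single 1 GB root disk and 1 vCPU give used_disk=1GB and used_vcpus=1. A minimal Python sketch of that arithmetic follows; the class and function names are illustrative only, not Nova's actual resource-tracker code.

    # Illustrative sketch of the bookkeeping behind the "Final resource view"
    # log entry above. Names here are hypothetical, not Nova internals.
    from dataclasses import dataclass

    @dataclass
    class InstanceUsage:
        vcpus: int
        memory_mb: int
        disk_gb: int

    def final_resource_view(phys_ram_mb, phys_disk_gb, total_vcpus,
                            reserved_ram_mb, instances):
        """Aggregate per-instance usage the way the audit reports it."""
        used_ram = reserved_ram_mb + sum(i.memory_mb for i in instances)
        used_disk = sum(i.disk_gb for i in instances)
        used_vcpus = sum(i.vcpus for i in instances)
        return {
            "phys_ram": phys_ram_mb, "used_ram": used_ram,
            "phys_disk": phys_disk_gb, "used_disk": used_disk,
            "total_vcpus": total_vcpus, "used_vcpus": used_vcpus,
        }

    # Values taken from the audit above: one instance still holds
    # 1 VCPU / 192 MB / 1 GB, and 512 MB of host RAM is reserved.
    view = final_resource_view(196590, 200, 48, 512,
                               [InstanceUsage(vcpus=1, memory_mb=192, disk_gb=1)])
    print(view)  # used_ram=704, used_disk=1, used_vcpus=1, matching the log

This is the same usage picture the scheduler report client reconciles against provider c23c281e-ec1f-4876-972e-a98655f2084f in the inventory entries above.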
[ 2750.952357] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2750.952636] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2750.952869] env[62684]: DEBUG nova.objects.instance [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lazy-loading 'resources' on Instance uuid d338d036-f969-41d4-8986-62b043e5ad2f {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2751.296269] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2751.487896] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5a329be-03ea-4c25-9abc-b01ea22ac370 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2751.495660] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a5c80c1-b6b8-4252-b743-bf93955520d7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2751.525376] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbacbe75-1a11-466e-add0-bf59d01144e4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2751.532378] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5e4a27f-2f40-4ff3-a60a-0b9f344ed100 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2751.545739] env[62684]: DEBUG nova.compute.provider_tree [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2752.048722] env[62684]: DEBUG nova.scheduler.client.report [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2752.554034] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.601s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2752.573526] env[62684]: INFO nova.scheduler.client.report [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Deleted allocations for instance d338d036-f969-41d4-8986-62b043e5ad2f [ 2753.081611] env[62684]: DEBUG oslo_concurrency.lockutils [None req-3552d24b-655a-46d9-912b-14f7d370a99a tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "d338d036-f969-41d4-8986-62b043e5ad2f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.775s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2753.970373] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "f87499c1-c122-41e1-bb86-21ce5cebc705" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2753.970677] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "f87499c1-c122-41e1-bb86-21ce5cebc705" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2754.473877] env[62684]: DEBUG nova.compute.manager [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Starting instance... 
{{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2754.995407] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2754.995682] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2754.997250] env[62684]: INFO nova.compute.claims [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2755.295572] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2756.031435] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c12a9afa-778d-446d-b1f2-bf9b137dc5b9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2756.039184] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e143a66-81cf-463c-bf34-0c6c84064586 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2756.068815] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20b374b5-8e1b-467f-ad6e-5f245954caa1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2756.075773] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-428bbfaf-ef04-4fd0-bfb6-f0b7be92ee34 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2756.088630] env[62684]: DEBUG nova.compute.provider_tree [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2756.592186] env[62684]: DEBUG nova.scheduler.client.report [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2757.096817] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.101s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2757.097392] env[62684]: DEBUG nova.compute.manager [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2757.300222] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2757.603371] env[62684]: DEBUG nova.compute.utils [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2757.606224] env[62684]: DEBUG nova.compute.manager [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Allocating IP information in the background. 
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2757.606667] env[62684]: DEBUG nova.network.neutron [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2757.657368] env[62684]: DEBUG nova.policy [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '22544927e67845a69c8ac324918f2e93', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21534190adb0460e9a74363ae059a59d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2757.917870] env[62684]: DEBUG nova.network.neutron [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Successfully created port: 6e533f20-209a-4c15-b94c-a5c1008f13ad {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2758.107444] env[62684]: DEBUG nova.compute.manager [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Start building block device mappings for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2759.118183] env[62684]: DEBUG nova.compute.manager [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Start spawning the instance on the hypervisor. 
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2759.145969] env[62684]: DEBUG nova.virt.hardware [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-10T07:25:26Z,direct_url=,disk_format='vmdk',id=3931321c-cb4c-4b87-8d3a-50e05ea01db2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='060baef1c5f640fda05fe7b750aa2f0a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-10T07:25:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2759.146389] env[62684]: DEBUG nova.virt.hardware [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2759.146643] env[62684]: DEBUG nova.virt.hardware [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2759.146949] env[62684]: DEBUG nova.virt.hardware [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2759.147241] env[62684]: DEBUG nova.virt.hardware [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2759.147639] env[62684]: DEBUG nova.virt.hardware [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2759.147881] env[62684]: DEBUG nova.virt.hardware [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2759.148183] env[62684]: DEBUG nova.virt.hardware [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2759.148509] env[62684]: DEBUG 
nova.virt.hardware [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2759.148829] env[62684]: DEBUG nova.virt.hardware [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2759.149165] env[62684]: DEBUG nova.virt.hardware [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2759.150860] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c556872-8b2c-4823-8ad5-0f10b801e7d9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2759.163026] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-465e6c87-652c-46e0-9962-ed5fb44f500b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2759.280971] env[62684]: DEBUG nova.compute.manager [req-be69b533-a14b-4445-816f-d34160f5d637 req-3e61b8ad-c84c-4863-a753-932b346a35fe service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Received event network-vif-plugged-6e533f20-209a-4c15-b94c-a5c1008f13ad {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2759.281234] env[62684]: DEBUG oslo_concurrency.lockutils [req-be69b533-a14b-4445-816f-d34160f5d637 req-3e61b8ad-c84c-4863-a753-932b346a35fe service nova] Acquiring lock "f87499c1-c122-41e1-bb86-21ce5cebc705-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2759.281455] env[62684]: DEBUG oslo_concurrency.lockutils [req-be69b533-a14b-4445-816f-d34160f5d637 req-3e61b8ad-c84c-4863-a753-932b346a35fe service nova] Lock "f87499c1-c122-41e1-bb86-21ce5cebc705-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2759.281631] env[62684]: DEBUG oslo_concurrency.lockutils [req-be69b533-a14b-4445-816f-d34160f5d637 req-3e61b8ad-c84c-4863-a753-932b346a35fe service nova] Lock "f87499c1-c122-41e1-bb86-21ce5cebc705-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2759.281806] env[62684]: DEBUG nova.compute.manager [req-be69b533-a14b-4445-816f-d34160f5d637 req-3e61b8ad-c84c-4863-a753-932b346a35fe service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] No waiting events found dispatching network-vif-plugged-6e533f20-209a-4c15-b94c-a5c1008f13ad {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2759.281980] env[62684]: WARNING nova.compute.manager 
[req-be69b533-a14b-4445-816f-d34160f5d637 req-3e61b8ad-c84c-4863-a753-932b346a35fe service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Received unexpected event network-vif-plugged-6e533f20-209a-4c15-b94c-a5c1008f13ad for instance with vm_state building and task_state spawning. [ 2759.362929] env[62684]: DEBUG nova.network.neutron [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Successfully updated port: 6e533f20-209a-4c15-b94c-a5c1008f13ad {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2759.866102] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "refresh_cache-f87499c1-c122-41e1-bb86-21ce5cebc705" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2759.866268] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquired lock "refresh_cache-f87499c1-c122-41e1-bb86-21ce5cebc705" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2759.866429] env[62684]: DEBUG nova.network.neutron [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2760.399084] env[62684]: DEBUG nova.network.neutron [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Instance cache missing network info. 
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2760.519710] env[62684]: DEBUG nova.network.neutron [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Updating instance_info_cache with network_info: [{"id": "6e533f20-209a-4c15-b94c-a5c1008f13ad", "address": "fa:16:3e:e3:0d:d8", "network": {"id": "93f58a85-0f77-4adc-88c5-bee1aa383535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1446072689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21534190adb0460e9a74363ae059a59d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e533f20-20", "ovs_interfaceid": "6e533f20-209a-4c15-b94c-a5c1008f13ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2761.022615] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Releasing lock "refresh_cache-f87499c1-c122-41e1-bb86-21ce5cebc705" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2761.022966] env[62684]: DEBUG nova.compute.manager [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Instance network_info: |[{"id": "6e533f20-209a-4c15-b94c-a5c1008f13ad", "address": "fa:16:3e:e3:0d:d8", "network": {"id": "93f58a85-0f77-4adc-88c5-bee1aa383535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1446072689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21534190adb0460e9a74363ae059a59d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e533f20-20", "ovs_interfaceid": "6e533f20-209a-4c15-b94c-a5c1008f13ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 2761.023460] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:0d:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7874ee7f-20c7-4bd8-a750-ed489e9acc65', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6e533f20-209a-4c15-b94c-a5c1008f13ad', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2761.030839] env[62684]: DEBUG oslo.service.loopingcall [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2761.031057] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2761.031292] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-70180f37-071d-4db0-bb8f-4b38664061a8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2761.050827] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2761.050827] env[62684]: value = "task-2054000" [ 2761.050827] env[62684]: _type = "Task" [ 2761.050827] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2761.059324] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2054000, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2761.307067] env[62684]: DEBUG nova.compute.manager [req-7e5814d7-7954-49af-bbcb-2e236fa7ff16 req-67ad6411-5ad0-4b93-ad33-a5563e60ed3f service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Received event network-changed-6e533f20-209a-4c15-b94c-a5c1008f13ad {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2761.307281] env[62684]: DEBUG nova.compute.manager [req-7e5814d7-7954-49af-bbcb-2e236fa7ff16 req-67ad6411-5ad0-4b93-ad33-a5563e60ed3f service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Refreshing instance network info cache due to event network-changed-6e533f20-209a-4c15-b94c-a5c1008f13ad. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2761.307533] env[62684]: DEBUG oslo_concurrency.lockutils [req-7e5814d7-7954-49af-bbcb-2e236fa7ff16 req-67ad6411-5ad0-4b93-ad33-a5563e60ed3f service nova] Acquiring lock "refresh_cache-f87499c1-c122-41e1-bb86-21ce5cebc705" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2761.307699] env[62684]: DEBUG oslo_concurrency.lockutils [req-7e5814d7-7954-49af-bbcb-2e236fa7ff16 req-67ad6411-5ad0-4b93-ad33-a5563e60ed3f service nova] Acquired lock "refresh_cache-f87499c1-c122-41e1-bb86-21ce5cebc705" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2761.307872] env[62684]: DEBUG nova.network.neutron [req-7e5814d7-7954-49af-bbcb-2e236fa7ff16 req-67ad6411-5ad0-4b93-ad33-a5563e60ed3f service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Refreshing network info cache for port 6e533f20-209a-4c15-b94c-a5c1008f13ad {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2761.561377] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2054000, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2761.989393] env[62684]: DEBUG nova.network.neutron [req-7e5814d7-7954-49af-bbcb-2e236fa7ff16 req-67ad6411-5ad0-4b93-ad33-a5563e60ed3f service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Updated VIF entry in instance network info cache for port 6e533f20-209a-4c15-b94c-a5c1008f13ad. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2761.989816] env[62684]: DEBUG nova.network.neutron [req-7e5814d7-7954-49af-bbcb-2e236fa7ff16 req-67ad6411-5ad0-4b93-ad33-a5563e60ed3f service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Updating instance_info_cache with network_info: [{"id": "6e533f20-209a-4c15-b94c-a5c1008f13ad", "address": "fa:16:3e:e3:0d:d8", "network": {"id": "93f58a85-0f77-4adc-88c5-bee1aa383535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1446072689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21534190adb0460e9a74363ae059a59d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e533f20-20", "ovs_interfaceid": "6e533f20-209a-4c15-b94c-a5c1008f13ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2762.061548] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2054000, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2762.492795] env[62684]: DEBUG oslo_concurrency.lockutils [req-7e5814d7-7954-49af-bbcb-2e236fa7ff16 req-67ad6411-5ad0-4b93-ad33-a5563e60ed3f service nova] Releasing lock "refresh_cache-f87499c1-c122-41e1-bb86-21ce5cebc705" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2762.562653] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2054000, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2763.062998] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2054000, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2763.563575] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2054000, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2764.064333] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2054000, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2764.564682] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2054000, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2765.066175] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2054000, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2765.567904] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2054000, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2766.066511] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2054000, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2766.567398] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2054000, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2767.067964] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2054000, 'name': CreateVM_Task, 'duration_secs': 6.000935} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2767.069037] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2767.069386] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2767.069568] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2767.069885] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2767.070160] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19608581-d06b-4436-8894-6b5b77c0b4f7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2767.074517] env[62684]: DEBUG oslo_vmware.api [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2767.074517] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bf12cd-8612-8cf7-b540-afb0e99583ee" [ 2767.074517] env[62684]: _type = "Task" [ 2767.074517] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2767.081541] env[62684]: DEBUG oslo_vmware.api [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bf12cd-8612-8cf7-b540-afb0e99583ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2767.584787] env[62684]: DEBUG oslo_vmware.api [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bf12cd-8612-8cf7-b540-afb0e99583ee, 'name': SearchDatastore_Task, 'duration_secs': 0.009624} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2767.585129] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2767.585396] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Processing image 3931321c-cb4c-4b87-8d3a-50e05ea01db2 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2767.585639] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2767.585792] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2767.585979] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2767.586261] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e706511f-03ae-43a3-9868-215973d2ba86 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2767.594576] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2767.594760] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2767.595506] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9fe1797-08ab-48a9-82ae-406d7e0944fe {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2767.600247] env[62684]: DEBUG oslo_vmware.api [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2767.600247] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523f5451-7079-fb7e-ad37-1d8fc86e083e" [ 2767.600247] env[62684]: _type = "Task" [ 2767.600247] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2767.606948] env[62684]: DEBUG oslo_vmware.api [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523f5451-7079-fb7e-ad37-1d8fc86e083e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2768.110704] env[62684]: DEBUG oslo_vmware.api [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523f5451-7079-fb7e-ad37-1d8fc86e083e, 'name': SearchDatastore_Task, 'duration_secs': 0.008487} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2768.111557] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99ec5664-c864-4e50-bdc7-32b71afd3755 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2768.116659] env[62684]: DEBUG oslo_vmware.api [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2768.116659] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5242e2e5-37fa-5197-2462-fd4bb619a264" [ 2768.116659] env[62684]: _type = "Task" [ 2768.116659] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2768.124373] env[62684]: DEBUG oslo_vmware.api [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5242e2e5-37fa-5197-2462-fd4bb619a264, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2768.630748] env[62684]: DEBUG oslo_vmware.api [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5242e2e5-37fa-5197-2462-fd4bb619a264, 'name': SearchDatastore_Task, 'duration_secs': 0.009354} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2768.631082] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2768.631565] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] f87499c1-c122-41e1-bb86-21ce5cebc705/f87499c1-c122-41e1-bb86-21ce5cebc705.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2768.631949] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-928d52b8-4e90-4f1b-8c6c-b00a041d78fe {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2768.641188] env[62684]: DEBUG oslo_vmware.api [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2768.641188] env[62684]: value = "task-2054001" [ 2768.641188] env[62684]: _type = "Task" [ 2768.641188] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2768.651076] env[62684]: DEBUG oslo_vmware.api [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054001, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2769.151027] env[62684]: DEBUG oslo_vmware.api [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054001, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.427141} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2769.151389] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3931321c-cb4c-4b87-8d3a-50e05ea01db2/3931321c-cb4c-4b87-8d3a-50e05ea01db2.vmdk to [datastore1] f87499c1-c122-41e1-bb86-21ce5cebc705/f87499c1-c122-41e1-bb86-21ce5cebc705.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2769.151509] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Extending root virtual disk to 1048576 {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2769.151765] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9f04cf1c-4991-46ef-8ea1-8a8b19a76ff6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2769.157805] env[62684]: DEBUG oslo_vmware.api [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2769.157805] env[62684]: value = "task-2054002" [ 2769.157805] env[62684]: _type = "Task" [ 2769.157805] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2769.165400] env[62684]: DEBUG oslo_vmware.api [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054002, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2769.667350] env[62684]: DEBUG oslo_vmware.api [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054002, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058698} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2769.667637] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Extended root virtual disk {{(pid=62684) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2769.668406] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-236d66de-0382-415a-81ad-3b1234bb3bf8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2769.689808] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] f87499c1-c122-41e1-bb86-21ce5cebc705/f87499c1-c122-41e1-bb86-21ce5cebc705.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2769.690079] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8feee0fd-5709-488b-9607-ade64c2281d3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2769.710269] env[62684]: DEBUG oslo_vmware.api [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2769.710269] env[62684]: value = "task-2054003" [ 2769.710269] env[62684]: _type = "Task" [ 2769.710269] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2769.719152] env[62684]: DEBUG oslo_vmware.api [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054003, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2770.220807] env[62684]: DEBUG oslo_vmware.api [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054003, 'name': ReconfigVM_Task, 'duration_secs': 0.276693} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2770.221204] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Reconfigured VM instance instance-0000007c to attach disk [datastore1] f87499c1-c122-41e1-bb86-21ce5cebc705/f87499c1-c122-41e1-bb86-21ce5cebc705.vmdk or device None with type sparse {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2770.221834] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dc351674-5488-4902-ad58-6ad3bcb3b171 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2770.228935] env[62684]: DEBUG oslo_vmware.api [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2770.228935] env[62684]: value = "task-2054004" [ 2770.228935] env[62684]: _type = "Task" [ 2770.228935] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2770.236436] env[62684]: DEBUG oslo_vmware.api [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054004, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2770.739089] env[62684]: DEBUG oslo_vmware.api [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054004, 'name': Rename_Task, 'duration_secs': 0.129885} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2770.739393] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2770.739645] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8eaafdc6-5690-42a1-aa6d-2fb1ac3df7da {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2770.745448] env[62684]: DEBUG oslo_vmware.api [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2770.745448] env[62684]: value = "task-2054005" [ 2770.745448] env[62684]: _type = "Task" [ 2770.745448] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2770.752640] env[62684]: DEBUG oslo_vmware.api [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054005, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2771.255439] env[62684]: DEBUG oslo_vmware.api [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054005, 'name': PowerOnVM_Task, 'duration_secs': 0.46409} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2771.255841] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2771.255928] env[62684]: INFO nova.compute.manager [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Took 12.14 seconds to spawn the instance on the hypervisor. [ 2771.256122] env[62684]: DEBUG nova.compute.manager [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2771.256918] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e32d2f7-5c2e-43af-9dd5-8de44e16f513 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2771.774401] env[62684]: INFO nova.compute.manager [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Took 16.80 seconds to build instance. [ 2772.276325] env[62684]: DEBUG oslo_concurrency.lockutils [None req-8a8b099a-41c5-4385-b166-db7c3a578166 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "f87499c1-c122-41e1-bb86-21ce5cebc705" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.305s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2772.440218] env[62684]: DEBUG nova.compute.manager [req-a173d038-cf71-4534-86b2-1e4073d71a7d req-51190d11-f290-4f2d-9ee3-03cbaf203e4f service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Received event network-changed-6e533f20-209a-4c15-b94c-a5c1008f13ad {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2772.440436] env[62684]: DEBUG nova.compute.manager [req-a173d038-cf71-4534-86b2-1e4073d71a7d req-51190d11-f290-4f2d-9ee3-03cbaf203e4f service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Refreshing instance network info cache due to event network-changed-6e533f20-209a-4c15-b94c-a5c1008f13ad. 
{{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2772.440674] env[62684]: DEBUG oslo_concurrency.lockutils [req-a173d038-cf71-4534-86b2-1e4073d71a7d req-51190d11-f290-4f2d-9ee3-03cbaf203e4f service nova] Acquiring lock "refresh_cache-f87499c1-c122-41e1-bb86-21ce5cebc705" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2772.440826] env[62684]: DEBUG oslo_concurrency.lockutils [req-a173d038-cf71-4534-86b2-1e4073d71a7d req-51190d11-f290-4f2d-9ee3-03cbaf203e4f service nova] Acquired lock "refresh_cache-f87499c1-c122-41e1-bb86-21ce5cebc705" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2772.440992] env[62684]: DEBUG nova.network.neutron [req-a173d038-cf71-4534-86b2-1e4073d71a7d req-51190d11-f290-4f2d-9ee3-03cbaf203e4f service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Refreshing network info cache for port 6e533f20-209a-4c15-b94c-a5c1008f13ad {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2773.144366] env[62684]: DEBUG nova.network.neutron [req-a173d038-cf71-4534-86b2-1e4073d71a7d req-51190d11-f290-4f2d-9ee3-03cbaf203e4f service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Updated VIF entry in instance network info cache for port 6e533f20-209a-4c15-b94c-a5c1008f13ad. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2773.144747] env[62684]: DEBUG nova.network.neutron [req-a173d038-cf71-4534-86b2-1e4073d71a7d req-51190d11-f290-4f2d-9ee3-03cbaf203e4f service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Updating instance_info_cache with network_info: [{"id": "6e533f20-209a-4c15-b94c-a5c1008f13ad", "address": "fa:16:3e:e3:0d:d8", "network": {"id": "93f58a85-0f77-4adc-88c5-bee1aa383535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1446072689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21534190adb0460e9a74363ae059a59d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e533f20-20", "ovs_interfaceid": "6e533f20-209a-4c15-b94c-a5c1008f13ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2773.647783] env[62684]: DEBUG oslo_concurrency.lockutils [req-a173d038-cf71-4534-86b2-1e4073d71a7d req-51190d11-f290-4f2d-9ee3-03cbaf203e4f service nova] Releasing lock "refresh_cache-f87499c1-c122-41e1-bb86-21ce5cebc705" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2803.302105] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic 
task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2803.807415] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2803.807689] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2803.807823] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2803.807976] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2803.808966] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b66ee1a-7074-4d3e-9d63-e6265413f4be {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2803.817720] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ebe3d9a-479a-48f3-9211-7454cbd32e0a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2803.831531] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d92cb0-55e1-449a-9715-73aae2e71dba {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2803.837725] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e81092-a76e-45bd-99ea-cab6063dcab6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2803.865666] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181274MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2803.865818] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2803.865991] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2804.892527] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance f87499c1-c122-41e1-bb86-21ce5cebc705 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2804.892820] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2804.892871] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2804.918036] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d75f95-c71b-4662-9dc8-27d8fcabe15e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2804.925160] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09833471-38bc-4f43-9b31-eb2e662ab9c6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2804.955165] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36d9c4d1-1be0-4baa-b65d-bd2d390cfaf1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2804.961916] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1608c9e5-0627-41e0-aa8a-2d1990a44d43 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2804.974495] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2805.477413] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2805.981958] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2805.982375] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.116s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2809.712116] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "f87499c1-c122-41e1-bb86-21ce5cebc705" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2809.712496] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "f87499c1-c122-41e1-bb86-21ce5cebc705" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2809.712624] env[62684]: INFO nova.compute.manager [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Shelving [ 2810.220393] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2810.220707] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d06e62d0-8971-4dff-9ed1-067cff94506a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2810.228653] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2810.228653] env[62684]: value = "task-2054006" [ 2810.228653] env[62684]: _type = "Task" [ 2810.228653] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2810.237204] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054006, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2810.739897] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054006, 'name': PowerOffVM_Task, 'duration_secs': 0.197348} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2810.740308] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2810.740948] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d31dedda-d28f-4726-aa53-2f087925ecb4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2810.759192] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6995c2d-0998-4b6c-9dbb-3e4b79877431 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2810.980585] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2810.980782] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2810.980904] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the list of instances to heal {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2811.271308] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Creating Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2811.271773] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9efd41c0-51f0-4b2d-84dc-9e1926b8bc60 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2811.279385] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2811.279385] env[62684]: value = "task-2054007" [ 2811.279385] env[62684]: _type = "Task" [ 2811.279385] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2811.287468] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054007, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2811.484809] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "refresh_cache-f87499c1-c122-41e1-bb86-21ce5cebc705" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2811.484967] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "refresh_cache-f87499c1-c122-41e1-bb86-21ce5cebc705" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2811.485142] env[62684]: DEBUG nova.network.neutron [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Forcefully refreshing network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2811.485297] env[62684]: DEBUG nova.objects.instance [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lazy-loading 'info_cache' on Instance uuid f87499c1-c122-41e1-bb86-21ce5cebc705 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2811.789686] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054007, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2812.290057] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054007, 'name': CreateSnapshot_Task, 'duration_secs': 0.670656} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2812.290395] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Created Snapshot of the VM instance {{(pid=62684) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2812.291157] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c6e285-7357-4895-8803-cc82014e5c8f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2812.807483] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Creating linked-clone VM from snapshot {{(pid=62684) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2812.807877] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1bf0aa40-6d3b-4f57-bc50-22e38650638c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2812.816366] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2812.816366] env[62684]: value = "task-2054008" [ 2812.816366] env[62684]: _type = "Task" [ 2812.816366] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2812.823755] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054008, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2813.209176] env[62684]: DEBUG nova.network.neutron [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Updating instance_info_cache with network_info: [{"id": "6e533f20-209a-4c15-b94c-a5c1008f13ad", "address": "fa:16:3e:e3:0d:d8", "network": {"id": "93f58a85-0f77-4adc-88c5-bee1aa383535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1446072689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21534190adb0460e9a74363ae059a59d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e533f20-20", "ovs_interfaceid": "6e533f20-209a-4c15-b94c-a5c1008f13ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2813.327419] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054008, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2813.712012] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "refresh_cache-f87499c1-c122-41e1-bb86-21ce5cebc705" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2813.712259] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Updated the network info_cache for instance {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2813.712563] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2813.712647] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2813.712778] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2813.712914] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2813.713073] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2813.713201] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2813.827559] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054008, 'name': CloneVM_Task} progress is 95%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2814.328075] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054008, 'name': CloneVM_Task, 'duration_secs': 1.169748} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2814.328371] env[62684]: INFO nova.virt.vmwareapi.vmops [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Created linked-clone VM from snapshot [ 2814.329101] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03c2c56c-2bed-4e7e-ae28-44f6decf4b07 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2814.336017] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Uploading image 8337f75f-b270-4b19-8b09-2a31e8e43b6e {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2814.357025] env[62684]: DEBUG oslo_vmware.rw_handles [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2814.357025] env[62684]: value = "vm-421446" [ 2814.357025] env[62684]: _type = "VirtualMachine" [ 2814.357025] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2814.357025] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-dc46c6a4-3f6a-4ce9-84d4-9f8fed3b7be8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2814.364421] env[62684]: DEBUG oslo_vmware.rw_handles [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lease: (returnval){ [ 2814.364421] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521ed45c-bf02-3923-b7c3-484032f6658b" [ 2814.364421] env[62684]: _type = "HttpNfcLease" [ 2814.364421] env[62684]: } obtained for exporting VM: (result){ [ 2814.364421] env[62684]: value = "vm-421446" [ 2814.364421] env[62684]: _type = "VirtualMachine" [ 2814.364421] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2814.364852] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the lease: (returnval){ [ 2814.364852] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521ed45c-bf02-3923-b7c3-484032f6658b" [ 2814.364852] env[62684]: _type = "HttpNfcLease" [ 2814.364852] env[62684]: } to be ready. {{(pid=62684) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2814.370975] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2814.370975] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521ed45c-bf02-3923-b7c3-484032f6658b" [ 2814.370975] env[62684]: _type = "HttpNfcLease" [ 2814.370975] env[62684]: } is initializing. 
{{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2814.873569] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2814.873569] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521ed45c-bf02-3923-b7c3-484032f6658b" [ 2814.873569] env[62684]: _type = "HttpNfcLease" [ 2814.873569] env[62684]: } is ready. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2814.873984] env[62684]: DEBUG oslo_vmware.rw_handles [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2814.873984] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521ed45c-bf02-3923-b7c3-484032f6658b" [ 2814.873984] env[62684]: _type = "HttpNfcLease" [ 2814.873984] env[62684]: }. {{(pid=62684) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2814.874592] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6d1da9e-ac91-49a4-a551-cedbe9acfcbd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2814.881671] env[62684]: DEBUG oslo_vmware.rw_handles [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5258a177-01e1-922d-89ed-0256b9517f7a/disk-0.vmdk from lease info. {{(pid=62684) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2814.881848] env[62684]: DEBUG oslo_vmware.rw_handles [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5258a177-01e1-922d-89ed-0256b9517f7a/disk-0.vmdk for reading. {{(pid=62684) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2814.972069] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ac3705c0-e9d5-4b69-a2ec-113b7fef5bba {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2816.028262] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2817.301073] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2823.578643] env[62684]: DEBUG oslo_vmware.rw_handles [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5258a177-01e1-922d-89ed-0256b9517f7a/disk-0.vmdk. 
{{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2823.579642] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ee84f10-1504-4912-90ea-130567d0b5f7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2823.586568] env[62684]: DEBUG oslo_vmware.rw_handles [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5258a177-01e1-922d-89ed-0256b9517f7a/disk-0.vmdk is in state: ready. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2823.586746] env[62684]: ERROR oslo_vmware.rw_handles [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5258a177-01e1-922d-89ed-0256b9517f7a/disk-0.vmdk due to incomplete transfer. [ 2823.586977] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e7e22360-2bd2-4f38-92a4-eccd3144823e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2823.594123] env[62684]: DEBUG oslo_vmware.rw_handles [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5258a177-01e1-922d-89ed-0256b9517f7a/disk-0.vmdk. {{(pid=62684) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2823.594327] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Uploaded image 8337f75f-b270-4b19-8b09-2a31e8e43b6e to the Glance image server {{(pid=62684) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2823.596572] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Destroying the VM {{(pid=62684) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2823.596852] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-96fcbe61-8d04-4418-a1d4-52c1ef2144e8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2823.602555] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2823.602555] env[62684]: value = "task-2054010" [ 2823.602555] env[62684]: _type = "Task" [ 2823.602555] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2823.610665] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054010, 'name': Destroy_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2824.112225] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054010, 'name': Destroy_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2824.613206] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054010, 'name': Destroy_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2825.114861] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054010, 'name': Destroy_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2825.615482] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054010, 'name': Destroy_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2826.116062] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054010, 'name': Destroy_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2826.616837] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054010, 'name': Destroy_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2827.117996] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054010, 'name': Destroy_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2827.620653] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054010, 'name': Destroy_Task} progress is 33%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2828.119622] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054010, 'name': Destroy_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2828.620905] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054010, 'name': Destroy_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2829.122395] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054010, 'name': Destroy_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2829.624274] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054010, 'name': Destroy_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2830.124633] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054010, 'name': Destroy_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2830.626301] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054010, 'name': Destroy_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2831.126811] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054010, 'name': Destroy_Task} progress is 33%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2831.628340] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054010, 'name': Destroy_Task, 'duration_secs': 7.555923} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2831.628760] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Destroyed the VM [ 2831.628861] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Deleting Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2831.629174] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0aee5b60-03d9-4339-b285-2f0d3eeb6ef5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2831.635284] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2831.635284] env[62684]: value = "task-2054011" [ 2831.635284] env[62684]: _type = "Task" [ 2831.635284] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2831.642443] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054011, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2832.145330] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054011, 'name': RemoveSnapshot_Task, 'duration_secs': 0.353392} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2832.145613] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Deleted Snapshot of the VM instance {{(pid=62684) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2832.145922] env[62684]: DEBUG nova.compute.manager [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2832.146689] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-671f978c-903f-40e8-80d0-94597c64e6d1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2832.660464] env[62684]: INFO nova.compute.manager [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Shelve offloading [ 2832.662123] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2832.662380] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c84c43d3-78ea-4093-aa7a-dd21171e5311 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2832.670133] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2832.670133] env[62684]: value = "task-2054012" [ 2832.670133] env[62684]: _type = "Task" [ 2832.670133] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2832.677665] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054012, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2833.180083] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] VM already powered off {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2833.180282] env[62684]: DEBUG nova.compute.manager [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2833.181013] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674dcb84-0e7c-405e-a0fb-fb84ad5ddb69 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2833.186311] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "refresh_cache-f87499c1-c122-41e1-bb86-21ce5cebc705" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2833.186477] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquired lock "refresh_cache-f87499c1-c122-41e1-bb86-21ce5cebc705" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2833.186656] env[62684]: DEBUG nova.network.neutron [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2833.886729] env[62684]: DEBUG nova.network.neutron [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Updating instance_info_cache with network_info: [{"id": "6e533f20-209a-4c15-b94c-a5c1008f13ad", "address": "fa:16:3e:e3:0d:d8", "network": {"id": "93f58a85-0f77-4adc-88c5-bee1aa383535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1446072689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21534190adb0460e9a74363ae059a59d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap6e533f20-20", "ovs_interfaceid": "6e533f20-209a-4c15-b94c-a5c1008f13ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2834.389432] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Releasing lock "refresh_cache-f87499c1-c122-41e1-bb86-21ce5cebc705" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2834.621816] env[62684]: DEBUG nova.compute.manager [req-69641f8e-d16c-470d-a36c-9772a61bff69 req-cec7b4f2-b2b6-47ba-a9c3-cd5ec350b75e service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Received event network-vif-unplugged-6e533f20-209a-4c15-b94c-a5c1008f13ad {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2834.621972] env[62684]: DEBUG oslo_concurrency.lockutils [req-69641f8e-d16c-470d-a36c-9772a61bff69 req-cec7b4f2-b2b6-47ba-a9c3-cd5ec350b75e service nova] Acquiring lock "f87499c1-c122-41e1-bb86-21ce5cebc705-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2834.622211] env[62684]: DEBUG oslo_concurrency.lockutils [req-69641f8e-d16c-470d-a36c-9772a61bff69 req-cec7b4f2-b2b6-47ba-a9c3-cd5ec350b75e service nova] Lock "f87499c1-c122-41e1-bb86-21ce5cebc705-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2834.622428] env[62684]: DEBUG oslo_concurrency.lockutils [req-69641f8e-d16c-470d-a36c-9772a61bff69 req-cec7b4f2-b2b6-47ba-a9c3-cd5ec350b75e service nova] Lock "f87499c1-c122-41e1-bb86-21ce5cebc705-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2834.622633] env[62684]: DEBUG nova.compute.manager [req-69641f8e-d16c-470d-a36c-9772a61bff69 req-cec7b4f2-b2b6-47ba-a9c3-cd5ec350b75e service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] No waiting events found dispatching network-vif-unplugged-6e533f20-209a-4c15-b94c-a5c1008f13ad {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2834.622856] env[62684]: WARNING nova.compute.manager [req-69641f8e-d16c-470d-a36c-9772a61bff69 req-cec7b4f2-b2b6-47ba-a9c3-cd5ec350b75e service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Received unexpected event network-vif-unplugged-6e533f20-209a-4c15-b94c-a5c1008f13ad for instance with vm_state shelved and task_state shelving_offloading. 
[ 2834.718035] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2834.719019] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1137072d-ca64-47c4-a933-e5e0adf56e1f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2834.726453] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2834.726709] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-50be4d58-ea57-449b-a013-09b9d5041502 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2834.808569] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2834.808811] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2834.808991] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Deleting the datastore file [datastore1] f87499c1-c122-41e1-bb86-21ce5cebc705 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2834.809289] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-068429ac-6ad7-4d85-92ae-3c577cfd7921 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2834.816970] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2834.816970] env[62684]: value = "task-2054014" [ 2834.816970] env[62684]: _type = "Task" [ 2834.816970] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2834.823898] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054014, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2835.327206] env[62684]: DEBUG oslo_vmware.api [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054014, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140444} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2835.327617] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2835.327670] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2835.327836] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2835.350550] env[62684]: INFO nova.scheduler.client.report [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Deleted allocations for instance f87499c1-c122-41e1-bb86-21ce5cebc705 [ 2835.855711] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2835.856050] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2835.856309] env[62684]: DEBUG nova.objects.instance [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lazy-loading 'resources' on Instance uuid f87499c1-c122-41e1-bb86-21ce5cebc705 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2836.359080] env[62684]: DEBUG nova.objects.instance [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lazy-loading 'numa_topology' on Instance uuid f87499c1-c122-41e1-bb86-21ce5cebc705 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2836.648934] env[62684]: DEBUG nova.compute.manager [req-03fb9814-442d-4d5f-aee9-6ce468bd1be6 req-3f765baf-7ad4-42d2-8278-7567d9e6b470 
service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Received event network-changed-6e533f20-209a-4c15-b94c-a5c1008f13ad {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2836.649466] env[62684]: DEBUG nova.compute.manager [req-03fb9814-442d-4d5f-aee9-6ce468bd1be6 req-3f765baf-7ad4-42d2-8278-7567d9e6b470 service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Refreshing instance network info cache due to event network-changed-6e533f20-209a-4c15-b94c-a5c1008f13ad. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2836.649702] env[62684]: DEBUG oslo_concurrency.lockutils [req-03fb9814-442d-4d5f-aee9-6ce468bd1be6 req-3f765baf-7ad4-42d2-8278-7567d9e6b470 service nova] Acquiring lock "refresh_cache-f87499c1-c122-41e1-bb86-21ce5cebc705" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2836.649855] env[62684]: DEBUG oslo_concurrency.lockutils [req-03fb9814-442d-4d5f-aee9-6ce468bd1be6 req-3f765baf-7ad4-42d2-8278-7567d9e6b470 service nova] Acquired lock "refresh_cache-f87499c1-c122-41e1-bb86-21ce5cebc705" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2836.650034] env[62684]: DEBUG nova.network.neutron [req-03fb9814-442d-4d5f-aee9-6ce468bd1be6 req-3f765baf-7ad4-42d2-8278-7567d9e6b470 service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Refreshing network info cache for port 6e533f20-209a-4c15-b94c-a5c1008f13ad {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2836.861632] env[62684]: DEBUG nova.objects.base [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62684) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2836.888750] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08831407-4f60-419c-8590-ce30268d3273 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2836.896754] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bc81f60-0a6e-4e15-8ca4-53c9f9de10d4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2836.929019] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c94a13e8-f0cd-452a-aed4-578b3820ddcf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2836.937274] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78cc7ed0-c5bd-4500-bbe9-862c9c674c43 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2836.951017] env[62684]: DEBUG nova.compute.provider_tree [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2837.088210] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 
tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "f87499c1-c122-41e1-bb86-21ce5cebc705" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2837.352019] env[62684]: DEBUG nova.network.neutron [req-03fb9814-442d-4d5f-aee9-6ce468bd1be6 req-3f765baf-7ad4-42d2-8278-7567d9e6b470 service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Updated VIF entry in instance network info cache for port 6e533f20-209a-4c15-b94c-a5c1008f13ad. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2837.352405] env[62684]: DEBUG nova.network.neutron [req-03fb9814-442d-4d5f-aee9-6ce468bd1be6 req-3f765baf-7ad4-42d2-8278-7567d9e6b470 service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Updating instance_info_cache with network_info: [{"id": "6e533f20-209a-4c15-b94c-a5c1008f13ad", "address": "fa:16:3e:e3:0d:d8", "network": {"id": "93f58a85-0f77-4adc-88c5-bee1aa383535", "bridge": null, "label": "tempest-ServerActionsTestOtherB-1446072689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21534190adb0460e9a74363ae059a59d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap6e533f20-20", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2837.455145] env[62684]: DEBUG nova.scheduler.client.report [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2837.854932] env[62684]: DEBUG oslo_concurrency.lockutils [req-03fb9814-442d-4d5f-aee9-6ce468bd1be6 req-3f765baf-7ad4-42d2-8278-7567d9e6b470 service nova] Releasing lock "refresh_cache-f87499c1-c122-41e1-bb86-21ce5cebc705" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2837.960230] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.104s {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2838.469708] env[62684]: DEBUG oslo_concurrency.lockutils [None req-e6b45f59-657d-4d6d-ab1e-010d5aedfa4b tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "f87499c1-c122-41e1-bb86-21ce5cebc705" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 28.757s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2838.470570] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "f87499c1-c122-41e1-bb86-21ce5cebc705" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.383s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2838.470759] env[62684]: INFO nova.compute.manager [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Unshelving [ 2839.494458] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2839.494736] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2839.494947] env[62684]: DEBUG nova.objects.instance [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lazy-loading 'pci_requests' on Instance uuid f87499c1-c122-41e1-bb86-21ce5cebc705 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2839.999518] env[62684]: DEBUG nova.objects.instance [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lazy-loading 'numa_topology' on Instance uuid f87499c1-c122-41e1-bb86-21ce5cebc705 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2840.501836] env[62684]: INFO nova.compute.claims [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2841.536045] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-382e8f85-8732-4d63-90c3-26a2cf156139 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2841.543825] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6b7c0538-5c56-4168-a81b-86859595c89c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2841.572575] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ca6d178-a122-47c8-9afb-91d178997b68 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2841.579956] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7dc39fb-a35d-4cee-8662-705e8f7d0af0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2841.592802] env[62684]: DEBUG nova.compute.provider_tree [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2842.095927] env[62684]: DEBUG nova.scheduler.client.report [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2842.601342] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.106s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2842.630171] env[62684]: INFO nova.network.neutron [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Updating port 6e533f20-209a-4c15-b94c-a5c1008f13ad with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2844.010395] env[62684]: DEBUG nova.compute.manager [req-44935c46-dadf-432c-bd4e-978371a2118a req-98148e07-c269-4a0e-918f-cca41475d83f service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Received event network-vif-plugged-6e533f20-209a-4c15-b94c-a5c1008f13ad {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2844.010395] env[62684]: DEBUG oslo_concurrency.lockutils [req-44935c46-dadf-432c-bd4e-978371a2118a req-98148e07-c269-4a0e-918f-cca41475d83f service nova] Acquiring lock "f87499c1-c122-41e1-bb86-21ce5cebc705-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2844.010395] env[62684]: DEBUG oslo_concurrency.lockutils [req-44935c46-dadf-432c-bd4e-978371a2118a 
req-98148e07-c269-4a0e-918f-cca41475d83f service nova] Lock "f87499c1-c122-41e1-bb86-21ce5cebc705-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2844.010395] env[62684]: DEBUG oslo_concurrency.lockutils [req-44935c46-dadf-432c-bd4e-978371a2118a req-98148e07-c269-4a0e-918f-cca41475d83f service nova] Lock "f87499c1-c122-41e1-bb86-21ce5cebc705-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2844.011109] env[62684]: DEBUG nova.compute.manager [req-44935c46-dadf-432c-bd4e-978371a2118a req-98148e07-c269-4a0e-918f-cca41475d83f service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] No waiting events found dispatching network-vif-plugged-6e533f20-209a-4c15-b94c-a5c1008f13ad {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2844.011471] env[62684]: WARNING nova.compute.manager [req-44935c46-dadf-432c-bd4e-978371a2118a req-98148e07-c269-4a0e-918f-cca41475d83f service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Received unexpected event network-vif-plugged-6e533f20-209a-4c15-b94c-a5c1008f13ad for instance with vm_state shelved_offloaded and task_state spawning. [ 2844.099451] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "refresh_cache-f87499c1-c122-41e1-bb86-21ce5cebc705" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2844.099703] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquired lock "refresh_cache-f87499c1-c122-41e1-bb86-21ce5cebc705" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2844.099924] env[62684]: DEBUG nova.network.neutron [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2844.801855] env[62684]: DEBUG nova.network.neutron [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Updating instance_info_cache with network_info: [{"id": "6e533f20-209a-4c15-b94c-a5c1008f13ad", "address": "fa:16:3e:e3:0d:d8", "network": {"id": "93f58a85-0f77-4adc-88c5-bee1aa383535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1446072689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, 
"tenant_id": "21534190adb0460e9a74363ae059a59d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e533f20-20", "ovs_interfaceid": "6e533f20-209a-4c15-b94c-a5c1008f13ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2845.304436] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Releasing lock "refresh_cache-f87499c1-c122-41e1-bb86-21ce5cebc705" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2845.333263] env[62684]: DEBUG nova.virt.hardware [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='6434c26d6e693bcc3b2c93f9e4d90015',container_format='bare',created_at=2025-01-10T08:04:10Z,direct_url=,disk_format='vmdk',id=8337f75f-b270-4b19-8b09-2a31e8e43b6e,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-307428727-shelved',owner='21534190adb0460e9a74363ae059a59d',properties=ImageMetaProps,protected=,size=31669248,status='active',tags=,updated_at=2025-01-10T08:04:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2845.333539] env[62684]: DEBUG nova.virt.hardware [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2845.333701] env[62684]: DEBUG nova.virt.hardware [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2845.333909] env[62684]: DEBUG nova.virt.hardware [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2845.334118] env[62684]: DEBUG nova.virt.hardware [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2845.334284] env[62684]: DEBUG nova.virt.hardware [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 
tempest-ServerActionsTestOtherB-1953174230-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2845.334496] env[62684]: DEBUG nova.virt.hardware [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2845.334665] env[62684]: DEBUG nova.virt.hardware [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2845.334829] env[62684]: DEBUG nova.virt.hardware [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Got 1 possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2845.334997] env[62684]: DEBUG nova.virt.hardware [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2845.335198] env[62684]: DEBUG nova.virt.hardware [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2845.336076] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12a361fc-9c07-44cc-a9f5-f03f690b10db {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2845.343907] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-026710eb-d222-4097-a49d-3e2c47cfd0b7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2845.356697] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:0d:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7874ee7f-20c7-4bd8-a750-ed489e9acc65', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6e533f20-209a-4c15-b94c-a5c1008f13ad', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2845.363860] env[62684]: DEBUG oslo.service.loopingcall [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2845.364155] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2845.364366] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c95c9e09-6855-4b52-9eab-a87170f63e5b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2845.382780] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2845.382780] env[62684]: value = "task-2054015" [ 2845.382780] env[62684]: _type = "Task" [ 2845.382780] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2845.389858] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2054015, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2845.892481] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2054015, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2846.033737] env[62684]: DEBUG nova.compute.manager [req-b8810fd4-9a64-4a6a-8f8e-b6d37491929d req-859c1204-ae0c-4c54-857c-8c2cebd3fc0f service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Received event network-changed-6e533f20-209a-4c15-b94c-a5c1008f13ad {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2846.033992] env[62684]: DEBUG nova.compute.manager [req-b8810fd4-9a64-4a6a-8f8e-b6d37491929d req-859c1204-ae0c-4c54-857c-8c2cebd3fc0f service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Refreshing instance network info cache due to event network-changed-6e533f20-209a-4c15-b94c-a5c1008f13ad. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2846.034210] env[62684]: DEBUG oslo_concurrency.lockutils [req-b8810fd4-9a64-4a6a-8f8e-b6d37491929d req-859c1204-ae0c-4c54-857c-8c2cebd3fc0f service nova] Acquiring lock "refresh_cache-f87499c1-c122-41e1-bb86-21ce5cebc705" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2846.034376] env[62684]: DEBUG oslo_concurrency.lockutils [req-b8810fd4-9a64-4a6a-8f8e-b6d37491929d req-859c1204-ae0c-4c54-857c-8c2cebd3fc0f service nova] Acquired lock "refresh_cache-f87499c1-c122-41e1-bb86-21ce5cebc705" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2846.034494] env[62684]: DEBUG nova.network.neutron [req-b8810fd4-9a64-4a6a-8f8e-b6d37491929d req-859c1204-ae0c-4c54-857c-8c2cebd3fc0f service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Refreshing network info cache for port 6e533f20-209a-4c15-b94c-a5c1008f13ad {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2846.393013] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2054015, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2846.725725] env[62684]: DEBUG nova.network.neutron [req-b8810fd4-9a64-4a6a-8f8e-b6d37491929d req-859c1204-ae0c-4c54-857c-8c2cebd3fc0f service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Updated VIF entry in instance network info cache for port 6e533f20-209a-4c15-b94c-a5c1008f13ad. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2846.726162] env[62684]: DEBUG nova.network.neutron [req-b8810fd4-9a64-4a6a-8f8e-b6d37491929d req-859c1204-ae0c-4c54-857c-8c2cebd3fc0f service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Updating instance_info_cache with network_info: [{"id": "6e533f20-209a-4c15-b94c-a5c1008f13ad", "address": "fa:16:3e:e3:0d:d8", "network": {"id": "93f58a85-0f77-4adc-88c5-bee1aa383535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1446072689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21534190adb0460e9a74363ae059a59d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e533f20-20", "ovs_interfaceid": "6e533f20-209a-4c15-b94c-a5c1008f13ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2846.892873] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2054015, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2847.229291] env[62684]: DEBUG oslo_concurrency.lockutils [req-b8810fd4-9a64-4a6a-8f8e-b6d37491929d req-859c1204-ae0c-4c54-857c-8c2cebd3fc0f service nova] Releasing lock "refresh_cache-f87499c1-c122-41e1-bb86-21ce5cebc705" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2847.393600] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2054015, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2847.897981] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2054015, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2848.394340] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2054015, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2848.896187] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2054015, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2849.395865] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2054015, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2849.896576] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2054015, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2850.397812] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2054015, 'name': CreateVM_Task} progress is 25%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2850.897929] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2054015, 'name': CreateVM_Task} progress is 99%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2851.398447] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2054015, 'name': CreateVM_Task, 'duration_secs': 5.586206} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2851.398897] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2851.399301] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8337f75f-b270-4b19-8b09-2a31e8e43b6e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2851.399485] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8337f75f-b270-4b19-8b09-2a31e8e43b6e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2851.399900] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8337f75f-b270-4b19-8b09-2a31e8e43b6e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2851.400181] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b728c6e9-1ace-4ce4-aecb-ef8718b3035c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2851.404506] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2851.404506] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ed7978-f326-e59b-9fd1-757138e46b8e" [ 2851.404506] env[62684]: _type = "Task" [ 2851.404506] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2851.411707] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52ed7978-f326-e59b-9fd1-757138e46b8e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2851.914891] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8337f75f-b270-4b19-8b09-2a31e8e43b6e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2851.915181] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Processing image 8337f75f-b270-4b19-8b09-2a31e8e43b6e {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2851.915423] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8337f75f-b270-4b19-8b09-2a31e8e43b6e/8337f75f-b270-4b19-8b09-2a31e8e43b6e.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2851.915575] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8337f75f-b270-4b19-8b09-2a31e8e43b6e/8337f75f-b270-4b19-8b09-2a31e8e43b6e.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2851.915755] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2851.916016] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-11feebee-c7ee-4903-8fe1-a1b1c546e978 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2851.923991] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2851.924209] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62684) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2851.924866] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff5d66b2-a659-46f0-bb00-795c9c5ee8a9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2851.929405] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2851.929405] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5268e664-7612-0969-34b4-5ee91262f3a5" [ 2851.929405] env[62684]: _type = "Task" [ 2851.929405] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2851.936559] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5268e664-7612-0969-34b4-5ee91262f3a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2852.439586] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Preparing fetch location {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2852.439931] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Fetch image to [datastore1] OSTACK_IMG_a0b60e54-e768-43cf-8706-bcd9a18c1296/OSTACK_IMG_a0b60e54-e768-43cf-8706-bcd9a18c1296.vmdk {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2852.440113] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Downloading stream optimized image 8337f75f-b270-4b19-8b09-2a31e8e43b6e to [datastore1] OSTACK_IMG_a0b60e54-e768-43cf-8706-bcd9a18c1296/OSTACK_IMG_a0b60e54-e768-43cf-8706-bcd9a18c1296.vmdk on the data store datastore1 as vApp {{(pid=62684) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2852.440303] env[62684]: DEBUG nova.virt.vmwareapi.images [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Downloading image file data 8337f75f-b270-4b19-8b09-2a31e8e43b6e to the ESX as VM named 'OSTACK_IMG_a0b60e54-e768-43cf-8706-bcd9a18c1296' {{(pid=62684) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2852.503382] env[62684]: DEBUG oslo_vmware.rw_handles [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2852.503382] 
env[62684]: value = "resgroup-9" [ 2852.503382] env[62684]: _type = "ResourcePool" [ 2852.503382] env[62684]: }. {{(pid=62684) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2852.503727] env[62684]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-d904aec7-2749-4afe-b8c4-d95504a1f839 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2852.523496] env[62684]: DEBUG oslo_vmware.rw_handles [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lease: (returnval){ [ 2852.523496] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bba598-dffe-707d-3e93-801643152622" [ 2852.523496] env[62684]: _type = "HttpNfcLease" [ 2852.523496] env[62684]: } obtained for vApp import into resource pool (val){ [ 2852.523496] env[62684]: value = "resgroup-9" [ 2852.523496] env[62684]: _type = "ResourcePool" [ 2852.523496] env[62684]: }. {{(pid=62684) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2852.523912] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the lease: (returnval){ [ 2852.523912] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bba598-dffe-707d-3e93-801643152622" [ 2852.523912] env[62684]: _type = "HttpNfcLease" [ 2852.523912] env[62684]: } to be ready. {{(pid=62684) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2852.529986] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2852.529986] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bba598-dffe-707d-3e93-801643152622" [ 2852.529986] env[62684]: _type = "HttpNfcLease" [ 2852.529986] env[62684]: } is initializing. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2853.031929] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2853.031929] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bba598-dffe-707d-3e93-801643152622" [ 2853.031929] env[62684]: _type = "HttpNfcLease" [ 2853.031929] env[62684]: } is initializing. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2853.533211] env[62684]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2853.533211] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bba598-dffe-707d-3e93-801643152622" [ 2853.533211] env[62684]: _type = "HttpNfcLease" [ 2853.533211] env[62684]: } is ready. {{(pid=62684) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2853.533714] env[62684]: DEBUG oslo_vmware.rw_handles [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2853.533714] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bba598-dffe-707d-3e93-801643152622" [ 2853.533714] env[62684]: _type = "HttpNfcLease" [ 2853.533714] env[62684]: }. 
{{(pid=62684) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2853.534267] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fd91168-0709-4226-aa85-c9dcd789b26a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2853.541177] env[62684]: DEBUG oslo_vmware.rw_handles [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5278baf7-693b-e51b-6783-8ad2b6836c5b/disk-0.vmdk from lease info. {{(pid=62684) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2853.541363] env[62684]: DEBUG oslo_vmware.rw_handles [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Creating HTTP connection to write to file with size = 31669248 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5278baf7-693b-e51b-6783-8ad2b6836c5b/disk-0.vmdk. {{(pid=62684) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2853.603896] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ed7285fe-0165-4182-ba35-241996359532 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2854.658099] env[62684]: DEBUG oslo_vmware.rw_handles [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Completed reading data from the image iterator. {{(pid=62684) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2854.658561] env[62684]: DEBUG oslo_vmware.rw_handles [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5278baf7-693b-e51b-6783-8ad2b6836c5b/disk-0.vmdk. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2854.659282] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a415b15-6cfb-4bfe-a7aa-de02d073329f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2854.667259] env[62684]: DEBUG oslo_vmware.rw_handles [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5278baf7-693b-e51b-6783-8ad2b6836c5b/disk-0.vmdk is in state: ready. {{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2854.667459] env[62684]: DEBUG oslo_vmware.rw_handles [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5278baf7-693b-e51b-6783-8ad2b6836c5b/disk-0.vmdk. 
{{(pid=62684) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2854.667748] env[62684]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-a84cf79e-d8c6-46dd-ace1-0bf266baa057 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2854.948159] env[62684]: DEBUG oslo_vmware.rw_handles [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5278baf7-693b-e51b-6783-8ad2b6836c5b/disk-0.vmdk. {{(pid=62684) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2854.948402] env[62684]: INFO nova.virt.vmwareapi.images [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Downloaded image file data 8337f75f-b270-4b19-8b09-2a31e8e43b6e [ 2854.949288] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fb43a11-9ff0-4051-848d-d3886a27e3bc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2854.966779] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d88d3588-6238-4e60-992d-724f736e64ec {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2855.011394] env[62684]: INFO nova.virt.vmwareapi.images [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] The imported VM was unregistered [ 2855.013920] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Caching image {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2855.014198] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Creating directory with path [datastore1] devstack-image-cache_base/8337f75f-b270-4b19-8b09-2a31e8e43b6e {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2855.014483] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dfdd725f-a9d7-45b7-8580-8fd255228b2c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2855.026425] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Created directory with path [datastore1] devstack-image-cache_base/8337f75f-b270-4b19-8b09-2a31e8e43b6e {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2855.026611] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 
tempest-ServerActionsTestOtherB-1953174230-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_a0b60e54-e768-43cf-8706-bcd9a18c1296/OSTACK_IMG_a0b60e54-e768-43cf-8706-bcd9a18c1296.vmdk to [datastore1] devstack-image-cache_base/8337f75f-b270-4b19-8b09-2a31e8e43b6e/8337f75f-b270-4b19-8b09-2a31e8e43b6e.vmdk. {{(pid=62684) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2855.026853] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-e36edfdc-c872-470d-ba31-ddef1eb7eabe {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2855.033284] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2855.033284] env[62684]: value = "task-2054018" [ 2855.033284] env[62684]: _type = "Task" [ 2855.033284] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2855.040544] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054018, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2855.546792] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054018, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2856.043876] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054018, 'name': MoveVirtualDisk_Task} progress is 32%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2856.546266] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054018, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2857.048241] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054018, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2857.552074] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054018, 'name': MoveVirtualDisk_Task} progress is 85%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2858.049812] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054018, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.882465} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2858.050240] env[62684]: INFO nova.virt.vmwareapi.ds_util [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_a0b60e54-e768-43cf-8706-bcd9a18c1296/OSTACK_IMG_a0b60e54-e768-43cf-8706-bcd9a18c1296.vmdk to [datastore1] devstack-image-cache_base/8337f75f-b270-4b19-8b09-2a31e8e43b6e/8337f75f-b270-4b19-8b09-2a31e8e43b6e.vmdk. [ 2858.050293] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Cleaning up location [datastore1] OSTACK_IMG_a0b60e54-e768-43cf-8706-bcd9a18c1296 {{(pid=62684) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2858.050502] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_a0b60e54-e768-43cf-8706-bcd9a18c1296 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2858.050759] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-54ec47cf-3b5e-4a26-828c-1aa6eba810e3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2858.057047] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2858.057047] env[62684]: value = "task-2054019" [ 2858.057047] env[62684]: _type = "Task" [ 2858.057047] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2858.064553] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054019, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2858.567263] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054019, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033077} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2858.567541] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2858.567715] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8337f75f-b270-4b19-8b09-2a31e8e43b6e/8337f75f-b270-4b19-8b09-2a31e8e43b6e.vmdk" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2858.567964] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8337f75f-b270-4b19-8b09-2a31e8e43b6e/8337f75f-b270-4b19-8b09-2a31e8e43b6e.vmdk to [datastore1] f87499c1-c122-41e1-bb86-21ce5cebc705/f87499c1-c122-41e1-bb86-21ce5cebc705.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2858.568237] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c846dd82-3761-4a16-b550-a84fed4fa0ba {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2858.575030] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2858.575030] env[62684]: value = "task-2054020" [ 2858.575030] env[62684]: _type = "Task" [ 2858.575030] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2858.582038] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054020, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2859.085723] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054020, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2859.588397] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054020, 'name': CopyVirtualDisk_Task} progress is 32%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2860.090183] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054020, 'name': CopyVirtualDisk_Task} progress is 54%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2860.588676] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054020, 'name': CopyVirtualDisk_Task} progress is 71%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2861.091876] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054020, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2861.591202] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054020, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.808242} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2861.591475] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8337f75f-b270-4b19-8b09-2a31e8e43b6e/8337f75f-b270-4b19-8b09-2a31e8e43b6e.vmdk to [datastore1] f87499c1-c122-41e1-bb86-21ce5cebc705/f87499c1-c122-41e1-bb86-21ce5cebc705.vmdk {{(pid=62684) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2861.592294] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34931d41-3562-4b5d-87e7-0cce27b71024 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2861.614849] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] f87499c1-c122-41e1-bb86-21ce5cebc705/f87499c1-c122-41e1-bb86-21ce5cebc705.vmdk or device None with type streamOptimized {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2861.615118] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a3bfed1-700d-40d1-b675-a77631f8d544 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2861.633908] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2861.633908] env[62684]: value = "task-2054021" [ 2861.633908] env[62684]: _type = "Task" [ 2861.633908] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2861.641286] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054021, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2862.143982] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054021, 'name': ReconfigVM_Task, 'duration_secs': 0.291137} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2862.144444] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Reconfigured VM instance instance-0000007c to attach disk [datastore1] f87499c1-c122-41e1-bb86-21ce5cebc705/f87499c1-c122-41e1-bb86-21ce5cebc705.vmdk or device None with type streamOptimized {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2862.144954] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e06a60e3-d159-4b0e-9861-9140023de046 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2862.151849] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2862.151849] env[62684]: value = "task-2054022" [ 2862.151849] env[62684]: _type = "Task" [ 2862.151849] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2862.159085] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054022, 'name': Rename_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2862.661666] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054022, 'name': Rename_Task, 'duration_secs': 0.147401} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2862.661942] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2862.662213] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1239cea0-c5db-4651-813d-89bd06142d45 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2862.667816] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2862.667816] env[62684]: value = "task-2054023" [ 2862.667816] env[62684]: _type = "Task" [ 2862.667816] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2862.674905] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054023, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2863.177624] env[62684]: DEBUG oslo_vmware.api [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054023, 'name': PowerOnVM_Task, 'duration_secs': 0.45047} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2863.178045] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2863.268635] env[62684]: DEBUG nova.compute.manager [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2863.269608] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-128435f9-4c4d-47a4-8c63-0d51556d5d99 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2863.300663] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2863.786666] env[62684]: DEBUG oslo_concurrency.lockutils [None req-f5e758b2-a856-470f-b08d-f1f763b2ab23 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "f87499c1-c122-41e1-bb86-21ce5cebc705" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 25.316s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2863.803673] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2863.803894] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2863.804079] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2863.804253] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2863.805135] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46b1a405-2daf-4f56-8739-c58bb33c96a2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2863.816417] env[62684]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d55b0573-78f1-4284-8052-8dfacb4664ac {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2863.834354] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec5ada4-bb4c-459c-b3d0-5f22ea1fd91c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2863.841530] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c499a19e-7d2f-4136-b339-60c38a65df86 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2863.871951] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181214MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2863.872115] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2863.872318] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2864.323220] env[62684]: DEBUG oslo_concurrency.lockutils [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "f87499c1-c122-41e1-bb86-21ce5cebc705" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2864.323615] env[62684]: DEBUG oslo_concurrency.lockutils [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "f87499c1-c122-41e1-bb86-21ce5cebc705" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2864.323751] env[62684]: DEBUG oslo_concurrency.lockutils [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "f87499c1-c122-41e1-bb86-21ce5cebc705-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2864.323923] env[62684]: DEBUG oslo_concurrency.lockutils [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "f87499c1-c122-41e1-bb86-21ce5cebc705-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 
0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2864.324164] env[62684]: DEBUG oslo_concurrency.lockutils [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "f87499c1-c122-41e1-bb86-21ce5cebc705-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2864.326363] env[62684]: INFO nova.compute.manager [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Terminating instance [ 2864.328476] env[62684]: DEBUG nova.compute.manager [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Start destroying the instance on the hypervisor. {{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2864.328476] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2864.329247] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b8fc183-375a-4f22-b774-c9f3b603d8ba {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2864.336530] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2864.336766] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-88c5be31-4a0c-4cb8-bb20-ceba7bff3d9a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2864.342777] env[62684]: DEBUG oslo_vmware.api [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2864.342777] env[62684]: value = "task-2054024" [ 2864.342777] env[62684]: _type = "Task" [ 2864.342777] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2864.351731] env[62684]: DEBUG oslo_vmware.api [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054024, 'name': PowerOffVM_Task} progress is 0%. 
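The terminate path that starts above serializes on two lock names: the instance UUID for the whole do_terminate_instance body, and "<uuid>-events" while queued external events are cleared. A rough sketch of that locking pattern with oslo.concurrency (clear_events and destroy are hypothetical callables; the real code goes through its own synchronized helpers):

    from oslo_concurrency import lockutils

    def do_terminate_instance(instance_uuid, clear_events, destroy):
        # Hold the per-instance lock for the whole terminate operation, as the
        # "acquired ... do_terminate_instance" / "released ..." pair in the
        # log shows.
        with lockutils.lock(instance_uuid):
            # Briefly take the '<uuid>-events' lock while clearing events.
            with lockutils.lock('%s-events' % instance_uuid):
                clear_events(instance_uuid)
            destroy(instance_uuid)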
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2864.852533] env[62684]: DEBUG oslo_vmware.api [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054024, 'name': PowerOffVM_Task, 'duration_secs': 0.19063} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2864.852807] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2864.852966] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2864.853239] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8a03db8e-4454-40df-b749-8688b2449305 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2864.897059] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance f87499c1-c122-41e1-bb86-21ce5cebc705 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
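The allocation reported for f87499c1-c122-41e1-bb86-21ce5cebc705 (1 VCPU, 192 MB, 1 GB disk) is what the resource audit folds into the "Final resource view" a couple of entries below: the 704 MB of used RAM lines up with the 512 MB reserved in the provider inventory plus this instance's 192 MB, and the instance accounts for used_disk=1GB and used_vcpus=1. The same arithmetic, with the values copied from the log:

    # Values copied from the surrounding log entries.
    reserved_mb = 512                                        # MEMORY_MB 'reserved' in the inventory
    allocations = {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}

    used_ram_mb = reserved_mb + allocations['MEMORY_MB']     # 704, as in the final resource view
    used_disk_gb = allocations['DISK_GB']                    # 1
    used_vcpus = allocations['VCPU']                         # 1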
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2864.897242] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2864.897393] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2864.912334] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Refreshing inventories for resource provider c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2864.925659] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Updating ProviderTree inventory for provider c23c281e-ec1f-4876-972e-a98655f2084f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2864.925923] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Updating inventory in ProviderTree for provider c23c281e-ec1f-4876-972e-a98655f2084f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2864.931579] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2864.931885] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2864.932106] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Deleting the datastore file [datastore1] f87499c1-c122-41e1-bb86-21ce5cebc705 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2864.932378] 
env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-87aafce4-279f-44c2-8dd4-f1a1ab8570cc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2864.938139] env[62684]: DEBUG oslo_vmware.api [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for the task: (returnval){ [ 2864.938139] env[62684]: value = "task-2054026" [ 2864.938139] env[62684]: _type = "Task" [ 2864.938139] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2864.938910] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Refreshing aggregate associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, aggregates: None {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2864.948340] env[62684]: DEBUG oslo_vmware.api [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054026, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2864.959017] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Refreshing trait associations for resource provider c23c281e-ec1f-4876-972e-a98655f2084f, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62684) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2864.983068] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-246205b5-2b2f-4e78-97de-49ff75809c69 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2864.990360] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb7d64bb-3183-4c52-b898-3282458e0780 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2865.020714] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c731db-3609-4f61-94bd-777e4d176153 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2865.026845] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cfd47f5-7828-4235-ad44-13e2b411f134 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2865.039517] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2865.450737] env[62684]: DEBUG oslo_vmware.api [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Task: {'id': task-2054026, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13154} completed 
successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2865.451159] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2865.451222] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2865.451375] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2865.451556] env[62684]: INFO nova.compute.manager [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Took 1.12 seconds to destroy the instance on the hypervisor. [ 2865.451798] env[62684]: DEBUG oslo.service.loopingcall [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
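Read in order, the entries above are the vmwareapi destroy sequence: power the VM off, unregister it from vCenter, delete its directory from datastore1, and only then hand off to network deallocation (which finishes a little further down with "Took 1.41 seconds to deallocate network"). The ordering, condensed into a sketch where every argument is a hypothetical callable standing in for the real vmops/ds_util helper:

    def destroy_instance(power_off, unregister, delete_datastore_dir,
                         deallocate_network, instance_uuid, datastore_path):
        power_off(instance_uuid)              # PowerOffVM_Task
        unregister(instance_uuid)             # VirtualMachine.UnregisterVM
        delete_datastore_dir(datastore_path)  # FileManager.DeleteDatastoreFile_Task
        deallocate_network(instance_uuid)     # neutron deallocate_for_instance()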
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2865.451997] env[62684]: DEBUG nova.compute.manager [-] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2865.452105] env[62684]: DEBUG nova.network.neutron [-] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2865.542713] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2865.882054] env[62684]: DEBUG nova.compute.manager [req-d66090e8-ecad-4acc-b697-a57a279e8c74 req-4fe43322-dd6d-4bb8-b5e0-3e18223fc6dc service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Received event network-vif-deleted-6e533f20-209a-4c15-b94c-a5c1008f13ad {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2865.882297] env[62684]: INFO nova.compute.manager [req-d66090e8-ecad-4acc-b697-a57a279e8c74 req-4fe43322-dd6d-4bb8-b5e0-3e18223fc6dc service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Neutron deleted interface 6e533f20-209a-4c15-b94c-a5c1008f13ad; detaching it from the instance and deleting it from the info cache [ 2865.882471] env[62684]: DEBUG nova.network.neutron [req-d66090e8-ecad-4acc-b697-a57a279e8c74 req-4fe43322-dd6d-4bb8-b5e0-3e18223fc6dc service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2866.046810] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2866.047026] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.175s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2866.362152] env[62684]: DEBUG nova.network.neutron [-] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2866.386014] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ade8a013-6edf-44e3-b6ab-e30ffd8b63d1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2866.395930] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8fba71b6-c7bd-43ec-9d4b-781af6eca78d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2866.419584] env[62684]: DEBUG nova.compute.manager [req-d66090e8-ecad-4acc-b697-a57a279e8c74 req-4fe43322-dd6d-4bb8-b5e0-3e18223fc6dc service nova] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Detach interface failed, port_id=6e533f20-209a-4c15-b94c-a5c1008f13ad, reason: Instance f87499c1-c122-41e1-bb86-21ce5cebc705 could not be found. {{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2866.865167] env[62684]: INFO nova.compute.manager [-] [instance: f87499c1-c122-41e1-bb86-21ce5cebc705] Took 1.41 seconds to deallocate network for instance. [ 2867.371703] env[62684]: DEBUG oslo_concurrency.lockutils [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2867.371995] env[62684]: DEBUG oslo_concurrency.lockutils [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2867.372247] env[62684]: DEBUG nova.objects.instance [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lazy-loading 'resources' on Instance uuid f87499c1-c122-41e1-bb86-21ce5cebc705 {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2867.906725] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8adb7b0-224a-4857-b99d-0fe0e2af9ca3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2867.914137] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e998dabc-f3c6-4582-a672-d02097c92852 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2867.943392] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d470174-e2b3-42f4-9742-c1fffc33acd4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2867.950708] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc4ebd99-0415-4b36-9531-92eaf5d4d7d8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2867.963387] env[62684]: DEBUG nova.compute.provider_tree [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2868.466655] env[62684]: DEBUG nova.scheduler.client.report [None req-843f937c-253e-4d7d-8486-ffc267834b82 
tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2868.971995] env[62684]: DEBUG oslo_concurrency.lockutils [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.600s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2868.993945] env[62684]: INFO nova.scheduler.client.report [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Deleted allocations for instance f87499c1-c122-41e1-bb86-21ce5cebc705 [ 2869.502531] env[62684]: DEBUG oslo_concurrency.lockutils [None req-843f937c-253e-4d7d-8486-ffc267834b82 tempest-ServerActionsTestOtherB-1953174230 tempest-ServerActionsTestOtherB-1953174230-project-member] Lock "f87499c1-c122-41e1-bb86-21ce5cebc705" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.179s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2872.046777] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2872.047221] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2872.047221] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the list of instances to heal {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2872.550180] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Didn't find any instances for network info cache update. 
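The ComputeManager._* lines here and in the entries that follow are oslo.service periodic tasks firing on their timers; _heal_instance_info_cache simply finds nothing to refresh now that the instance is gone, and _reclaim_queued_deletes is skipped because reclaim_instance_interval <= 0. A minimal, self-contained sketch of how such tasks are declared with oslo.service (not the actual Nova definitions; the 60-second spacing is arbitrary):

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF

    class ExampleManager(periodic_task.PeriodicTasks):
        """Toy stand-in for ComputeManager with one periodic task."""

        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=60)
        def _heal_instance_info_cache(self, context):
            # The real task rebuilds per-instance network info caches; the log
            # above shows it finding no instances to work on.
            pass

    # The service loop drives these via run_periodic_tasks(), which is what
    # produces the "Running periodic task ..." lines.
    # ExampleManager().run_periodic_tasks(context=None)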
{{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 2872.550421] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2872.550609] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2872.550738] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2872.550897] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2872.551062] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2872.551198] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2875.801323] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2878.300972] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2880.296327] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2895.779049] env[62684]: DEBUG oslo_concurrency.lockutils [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Acquiring lock "435a33ca-05df-404e-8b98-a62dece47eba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2895.779385] env[62684]: DEBUG oslo_concurrency.lockutils [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Lock "435a33ca-05df-404e-8b98-a62dece47eba" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2896.281340] env[62684]: DEBUG nova.compute.manager [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Starting instance... {{(pid=62684) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2896.803887] env[62684]: DEBUG oslo_concurrency.lockutils [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2896.804192] env[62684]: DEBUG oslo_concurrency.lockutils [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2896.806102] env[62684]: INFO nova.compute.claims [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2897.842445] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a5040b-a5c6-48d1-99b4-ed922999992a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2897.850995] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46d9bff0-17f6-4df6-a1d2-4610157f3900 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2897.879795] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-338e0df4-985f-4eb2-a733-501bad6ed700 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2897.886756] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1674bb-3e7b-4ee0-bedb-395f4dd1246b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2897.899480] env[62684]: DEBUG nova.compute.provider_tree [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2898.402443] env[62684]: DEBUG nova.scheduler.client.report [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: 
{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2898.907331] env[62684]: DEBUG oslo_concurrency.lockutils [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.103s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2898.907783] env[62684]: DEBUG nova.compute.manager [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Start building networks asynchronously for instance. {{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2899.412598] env[62684]: DEBUG nova.compute.utils [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Using /dev/sd instead of None {{(pid=62684) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2899.414191] env[62684]: DEBUG nova.compute.manager [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Allocating IP information in the background. 
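Every "Inventory has not changed" check in this section, and the claim above, works from the same inventory payload; in placement terms the schedulable capacity of each resource class is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. Worked out for the values in the log:

    # Inventory reported for provider c23c281e-ec1f-4876-972e-a98655f2084f
    # (figures copied from the log).
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 155},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity, 'max per allocation:', inv['max_unit'])
    # VCPU:      (48 - 0)       * 4.0 = 192.0 schedulable units, at most 16 each
    # MEMORY_MB: (196590 - 512) * 1.0 = 196078.0
    # DISK_GB:   (400 - 0)      * 1.0 = 400.0, at most 155 per allocation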
{{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2899.414461] env[62684]: DEBUG nova.network.neutron [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] allocate_for_instance() {{(pid=62684) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2899.463607] env[62684]: DEBUG nova.policy [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '95eaa7932807400cae147ea99a585259', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '326156690fbe48ac93781ce4603e5ccb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62684) authorize /opt/stack/nova/nova/policy.py:201}} [ 2899.732524] env[62684]: DEBUG nova.network.neutron [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Successfully created port: 9b62de64-d50c-4dde-943b-8b68e671b61c {{(pid=62684) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2899.918855] env[62684]: DEBUG nova.compute.manager [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Start building block device mappings for instance. 
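The "Allocating IP information in the background" and "Start building block device mappings" entries show the two build phases overlapping: port creation proceeds asynchronously while the block device work continues, and the result is only joined when the guest is actually spawned. The preceding "Policy check for network:attach_external_network failed" line is a DEBUG record that this member-role tenant may not attach to external networks, not a build failure; the port is created successfully right after. A rough sketch of that ordering using a plain thread pool (the real service uses its own async machinery; all arguments are hypothetical callables):

    import concurrent.futures

    def build_instance(allocate_networks, build_block_devices, spawn, instance_uuid):
        with concurrent.futures.ThreadPoolExecutor(max_workers=1) as pool:
            # Kick off network allocation in the background...
            network_future = pool.submit(allocate_networks, instance_uuid)
            # ...while block device mappings are prepared in the foreground.
            block_devices = build_block_devices(instance_uuid)
            # The network result is only needed when spawning the guest.
            network_info = network_future.result()
        return spawn(instance_uuid, network_info, block_devices)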
{{(pid=62684) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2900.424654] env[62684]: INFO nova.virt.block_device [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Booting with volume 918c1ac0-fd46-4303-b416-1e4b1db78a3b at /dev/sda [ 2900.460041] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-00205f5c-1468-4a12-a7ac-95e0e75232fb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2900.469323] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1b7397a-caa3-4363-80eb-cc151923ee0d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2900.492088] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0f7a2885-00fe-4194-a9a0-bfe4dbccc041 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2900.499863] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0147fff7-74b9-477d-bbc5-6d5a8ad17dd6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2900.522556] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49939f0-25f4-42ba-ae57-01bc569f55e1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2900.529117] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52d04612-f30e-432e-87da-bc06858489d3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2900.542170] env[62684]: DEBUG nova.virt.block_device [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Updating existing volume attachment record: 0f843322-af31-41c2-962b-78f182742ca9 {{(pid=62684) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2901.091105] env[62684]: DEBUG nova.compute.manager [req-272d6215-1403-4b56-805b-72971347c61e req-00ff41c4-a80b-40f5-b032-6a194c5ada7b service nova] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Received event network-vif-plugged-9b62de64-d50c-4dde-943b-8b68e671b61c {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2901.091105] env[62684]: DEBUG oslo_concurrency.lockutils [req-272d6215-1403-4b56-805b-72971347c61e req-00ff41c4-a80b-40f5-b032-6a194c5ada7b service nova] Acquiring lock "435a33ca-05df-404e-8b98-a62dece47eba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2901.091105] env[62684]: DEBUG oslo_concurrency.lockutils [req-272d6215-1403-4b56-805b-72971347c61e req-00ff41c4-a80b-40f5-b032-6a194c5ada7b service nova] Lock "435a33ca-05df-404e-8b98-a62dece47eba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62684) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2901.091105] env[62684]: DEBUG oslo_concurrency.lockutils [req-272d6215-1403-4b56-805b-72971347c61e req-00ff41c4-a80b-40f5-b032-6a194c5ada7b service nova] Lock "435a33ca-05df-404e-8b98-a62dece47eba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2901.091822] env[62684]: DEBUG nova.compute.manager [req-272d6215-1403-4b56-805b-72971347c61e req-00ff41c4-a80b-40f5-b032-6a194c5ada7b service nova] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] No waiting events found dispatching network-vif-plugged-9b62de64-d50c-4dde-943b-8b68e671b61c {{(pid=62684) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2901.092934] env[62684]: WARNING nova.compute.manager [req-272d6215-1403-4b56-805b-72971347c61e req-00ff41c4-a80b-40f5-b032-6a194c5ada7b service nova] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Received unexpected event network-vif-plugged-9b62de64-d50c-4dde-943b-8b68e671b61c for instance with vm_state building and task_state block_device_mapping. [ 2901.174638] env[62684]: DEBUG nova.network.neutron [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Successfully updated port: 9b62de64-d50c-4dde-943b-8b68e671b61c {{(pid=62684) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2901.680353] env[62684]: DEBUG oslo_concurrency.lockutils [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Acquiring lock "refresh_cache-435a33ca-05df-404e-8b98-a62dece47eba" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2901.680524] env[62684]: DEBUG oslo_concurrency.lockutils [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Acquired lock "refresh_cache-435a33ca-05df-404e-8b98-a62dece47eba" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2901.680665] env[62684]: DEBUG nova.network.neutron [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Building network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2902.219920] env[62684]: DEBUG nova.network.neutron [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Instance cache missing network info. 
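Neutron reports network-vif-plugged-9b62de64-d50c-4dde-943b-8b68e671b61c while the instance is still in task_state block_device_mapping, before anything is waiting on that event, so the pop finds no waiter and the event is logged as unexpected; the WARNING is harmless here, as the build continues below. A toy version of the waiting-event registry this implies (not Nova's implementation):

    import threading

    class InstanceEvents:
        """Waiters register interest in an event name; external events that
        arrive with no registered waiter are reported as unexpected."""

        def __init__(self):
            self._waiters = {}              # event name -> threading.Event
            self._lock = threading.Lock()

        def prepare_for(self, name):
            with self._lock:
                ev = self._waiters[name] = threading.Event()
            return ev

        def pop_event(self, name):
            with self._lock:
                return self._waiters.pop(name, None)

    events = InstanceEvents()
    waiter = events.pop_event('network-vif-plugged-9b62de64-d50c-4dde-943b-8b68e671b61c')
    if waiter is None:
        print('unexpected event')           # the WARNING case in the log
    else:
        waiter.set()                        # would wake a blocked build step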
{{(pid=62684) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2902.341658] env[62684]: DEBUG nova.network.neutron [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Updating instance_info_cache with network_info: [{"id": "9b62de64-d50c-4dde-943b-8b68e671b61c", "address": "fa:16:3e:55:7d:fa", "network": {"id": "62056c7a-56ed-44de-b713-cc7a05fd5072", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1112290977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "326156690fbe48ac93781ce4603e5ccb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bdf594e-da7a-4254-b413-87aef4614588", "external-id": "nsx-vlan-transportzone-422", "segmentation_id": 422, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b62de64-d5", "ovs_interfaceid": "9b62de64-d50c-4dde-943b-8b68e671b61c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2902.621292] env[62684]: DEBUG nova.compute.manager [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Start spawning the instance on the hypervisor. 
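Only a few fields of the cached VIF entry above are consumed when the vmwareapi driver builds the "Instance VIF info" later in this section; restated as data, with the values copied from the log:

    # The neutron port fields the driver actually reuses...
    vif = {
        'id': '9b62de64-d50c-4dde-943b-8b68e671b61c',
        'address': 'fa:16:3e:55:7d:fa',
        'type': 'ovs',
        'details': {'nsx-logical-switch-id': '7bdf594e-da7a-4254-b413-87aef4614588'},
    }

    # ...and the VIF info they become (matching the entry further down).
    vif_info = {
        'network_name': 'br-int',
        'mac_address': vif['address'],
        'network_ref': {'type': 'OpaqueNetwork',
                        'network-id': vif['details']['nsx-logical-switch-id'],
                        'network-type': 'nsx.LogicalSwitch',
                        'use-external-id': True},
        'iface_id': vif['id'],
        'vif_model': 'vmxnet3',
    }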
{{(pid=62684) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2902.621865] env[62684]: DEBUG nova.virt.hardware [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-10T07:25:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2902.622111] env[62684]: DEBUG nova.virt.hardware [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Flavor limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2902.622279] env[62684]: DEBUG nova.virt.hardware [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Image limits 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2902.622472] env[62684]: DEBUG nova.virt.hardware [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Flavor pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2902.622620] env[62684]: DEBUG nova.virt.hardware [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Image pref 0:0:0 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2902.622771] env[62684]: DEBUG nova.virt.hardware [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62684) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2902.622986] env[62684]: DEBUG nova.virt.hardware [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2902.623172] env[62684]: DEBUG nova.virt.hardware [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2902.623349] env[62684]: DEBUG nova.virt.hardware [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Got 1 
possible topologies {{(pid=62684) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2902.623518] env[62684]: DEBUG nova.virt.hardware [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2902.623699] env[62684]: DEBUG nova.virt.hardware [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62684) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2902.624644] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a99194f-1c22-45a8-ac1e-4254d425964d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2902.632912] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da16b9c6-07e4-4305-9f90-311e0b7c5f1e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2902.844232] env[62684]: DEBUG oslo_concurrency.lockutils [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Releasing lock "refresh_cache-435a33ca-05df-404e-8b98-a62dece47eba" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2902.844619] env[62684]: DEBUG nova.compute.manager [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Instance network_info: |[{"id": "9b62de64-d50c-4dde-943b-8b68e671b61c", "address": "fa:16:3e:55:7d:fa", "network": {"id": "62056c7a-56ed-44de-b713-cc7a05fd5072", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1112290977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "326156690fbe48ac93781ce4603e5ccb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bdf594e-da7a-4254-b413-87aef4614588", "external-id": "nsx-vlan-transportzone-422", "segmentation_id": 422, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b62de64-d5", "ovs_interfaceid": "9b62de64-d50c-4dde-943b-8b68e671b61c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62684) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2902.845123] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 
435a33ca-05df-404e-8b98-a62dece47eba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:7d:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7bdf594e-da7a-4254-b413-87aef4614588', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9b62de64-d50c-4dde-943b-8b68e671b61c', 'vif_model': 'vmxnet3'}] {{(pid=62684) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2902.852804] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Creating folder: Project (326156690fbe48ac93781ce4603e5ccb). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2902.853103] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5a498c2d-8d7a-4122-abfd-7b94864bd743 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2902.865735] env[62684]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 2902.865912] env[62684]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62684) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 2902.866238] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Folder already exists: Project (326156690fbe48ac93781ce4603e5ccb). Parent ref: group-v421118. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 2902.866430] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Creating folder: Instances. Parent ref: group-v421449. {{(pid=62684) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2902.866696] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-02c9b5b5-323b-45fa-9223-232817694ef9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2902.876981] env[62684]: INFO nova.virt.vmwareapi.vm_util [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Created folder: Instances in parent group-v421449. [ 2902.877223] env[62684]: DEBUG oslo.service.loopingcall [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
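The suds WARNING and the [DuplicateName] fault above are the idempotent-path case: vCenter refuses to create the "Project (...)" folder because it already exists, and the driver treats that fault as success before creating the "Instances" folder beneath it. The pattern, sketched with a hypothetical fault class and create callable:

    class DuplicateName(Exception):
        """Stand-in for the vSphere DuplicateName fault surfaced to the caller."""

    def ensure_folder(create_folder, name, parent_ref):
        """Idempotent folder creation: an existing folder is not an error."""
        try:
            return create_folder(name, parent_ref)   # Folder.CreateFolder
        except DuplicateName:
            # Matches "Folder already exists: Project (...)" in the log.
            return None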
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2902.877415] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Creating VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2902.877652] env[62684]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ea2149b2-652a-40d3-8bbc-035151c2dcac {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2902.898221] env[62684]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2902.898221] env[62684]: value = "task-2054040" [ 2902.898221] env[62684]: _type = "Task" [ 2902.898221] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2902.905569] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2054040, 'name': CreateVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2903.117120] env[62684]: DEBUG nova.compute.manager [req-7bd4fc64-c4fc-4a58-ba44-05ba85430109 req-04c3136d-217b-4949-b006-2e413d069d91 service nova] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Received event network-changed-9b62de64-d50c-4dde-943b-8b68e671b61c {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2903.117254] env[62684]: DEBUG nova.compute.manager [req-7bd4fc64-c4fc-4a58-ba44-05ba85430109 req-04c3136d-217b-4949-b006-2e413d069d91 service nova] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Refreshing instance network info cache due to event network-changed-9b62de64-d50c-4dde-943b-8b68e671b61c. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2903.117475] env[62684]: DEBUG oslo_concurrency.lockutils [req-7bd4fc64-c4fc-4a58-ba44-05ba85430109 req-04c3136d-217b-4949-b006-2e413d069d91 service nova] Acquiring lock "refresh_cache-435a33ca-05df-404e-8b98-a62dece47eba" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2903.117629] env[62684]: DEBUG oslo_concurrency.lockutils [req-7bd4fc64-c4fc-4a58-ba44-05ba85430109 req-04c3136d-217b-4949-b006-2e413d069d91 service nova] Acquired lock "refresh_cache-435a33ca-05df-404e-8b98-a62dece47eba" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2903.117789] env[62684]: DEBUG nova.network.neutron [req-7bd4fc64-c4fc-4a58-ba44-05ba85430109 req-04c3136d-217b-4949-b006-2e413d069d91 service nova] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Refreshing network info cache for port 9b62de64-d50c-4dde-943b-8b68e671b61c {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2903.408057] env[62684]: DEBUG oslo_vmware.api [-] Task: {'id': task-2054040, 'name': CreateVM_Task, 'duration_secs': 0.334772} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2903.408434] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Created VM on the ESX host {{(pid=62684) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2903.408877] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'disk_bus': None, 'boot_index': 0, 'device_type': None, 'attachment_id': '0f843322-af31-41c2-962b-78f182742ca9', 'guest_format': None, 'mount_device': '/dev/sda', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421452', 'volume_id': '918c1ac0-fd46-4303-b416-1e4b1db78a3b', 'name': 'volume-918c1ac0-fd46-4303-b416-1e4b1db78a3b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '435a33ca-05df-404e-8b98-a62dece47eba', 'attached_at': '', 'detached_at': '', 'volume_id': '918c1ac0-fd46-4303-b416-1e4b1db78a3b', 'serial': '918c1ac0-fd46-4303-b416-1e4b1db78a3b'}, 'volume_type': None}], 'swap': None} {{(pid=62684) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 2903.409115] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Root volume attach. Driver type: vmdk {{(pid=62684) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 2903.409870] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb1d052a-2cb1-48bc-8e5f-f0a1a9745479 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2903.417238] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-828fa16d-3ae9-47f4-b255-af2cbe0d4ff2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2903.422766] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c3c35ed-d976-4406-b40d-a05736d2ef4e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2903.428323] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-6176d450-55c7-4e2d-9579-1c065f1c3823 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2903.435464] env[62684]: DEBUG oslo_vmware.api [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Waiting for the task: (returnval){ [ 2903.435464] env[62684]: value = "task-2054041" [ 2903.435464] env[62684]: _type = "Task" [ 2903.435464] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2903.443051] env[62684]: DEBUG oslo_vmware.api [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Task: {'id': task-2054041, 'name': RelocateVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2903.807286] env[62684]: DEBUG nova.network.neutron [req-7bd4fc64-c4fc-4a58-ba44-05ba85430109 req-04c3136d-217b-4949-b006-2e413d069d91 service nova] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Updated VIF entry in instance network info cache for port 9b62de64-d50c-4dde-943b-8b68e671b61c. {{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2903.807662] env[62684]: DEBUG nova.network.neutron [req-7bd4fc64-c4fc-4a58-ba44-05ba85430109 req-04c3136d-217b-4949-b006-2e413d069d91 service nova] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Updating instance_info_cache with network_info: [{"id": "9b62de64-d50c-4dde-943b-8b68e671b61c", "address": "fa:16:3e:55:7d:fa", "network": {"id": "62056c7a-56ed-44de-b713-cc7a05fd5072", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1112290977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "326156690fbe48ac93781ce4603e5ccb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bdf594e-da7a-4254-b413-87aef4614588", "external-id": "nsx-vlan-transportzone-422", "segmentation_id": 422, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b62de64-d5", "ovs_interfaceid": "9b62de64-d50c-4dde-943b-8b68e671b61c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2903.944887] env[62684]: DEBUG oslo_vmware.api [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Task: {'id': task-2054041, 'name': RelocateVM_Task, 'duration_secs': 0.332589} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2903.945186] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Volume attach. 
Driver type: vmdk {{(pid=62684) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2903.945413] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421452', 'volume_id': '918c1ac0-fd46-4303-b416-1e4b1db78a3b', 'name': 'volume-918c1ac0-fd46-4303-b416-1e4b1db78a3b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '435a33ca-05df-404e-8b98-a62dece47eba', 'attached_at': '', 'detached_at': '', 'volume_id': '918c1ac0-fd46-4303-b416-1e4b1db78a3b', 'serial': '918c1ac0-fd46-4303-b416-1e4b1db78a3b'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2903.946159] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b27d92-d3dd-4636-9087-f255e09c3c2b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2903.961495] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94e9cf27-a31a-4f5b-811e-b1b90883cf51 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2903.984244] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] volume-918c1ac0-fd46-4303-b416-1e4b1db78a3b/volume-918c1ac0-fd46-4303-b416-1e4b1db78a3b.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2903.984578] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f6e2d12-92d7-46a7-9f0e-add20aead4f8 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2904.003947] env[62684]: DEBUG oslo_vmware.api [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Waiting for the task: (returnval){ [ 2904.003947] env[62684]: value = "task-2054042" [ 2904.003947] env[62684]: _type = "Task" [ 2904.003947] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2904.011338] env[62684]: DEBUG oslo_vmware.api [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Task: {'id': task-2054042, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2904.310620] env[62684]: DEBUG oslo_concurrency.lockutils [req-7bd4fc64-c4fc-4a58-ba44-05ba85430109 req-04c3136d-217b-4949-b006-2e413d069d91 service nova] Releasing lock "refresh_cache-435a33ca-05df-404e-8b98-a62dece47eba" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2904.514517] env[62684]: DEBUG oslo_vmware.api [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Task: {'id': task-2054042, 'name': ReconfigVM_Task, 'duration_secs': 0.247409} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2904.514906] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Reconfigured VM instance instance-0000007d to attach disk [datastore1] volume-918c1ac0-fd46-4303-b416-1e4b1db78a3b/volume-918c1ac0-fd46-4303-b416-1e4b1db78a3b.vmdk or device None with type thin {{(pid=62684) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2904.519467] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-203dc922-764b-4eef-a682-260103d00cd6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2904.534432] env[62684]: DEBUG oslo_vmware.api [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Waiting for the task: (returnval){ [ 2904.534432] env[62684]: value = "task-2054043" [ 2904.534432] env[62684]: _type = "Task" [ 2904.534432] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2904.542282] env[62684]: DEBUG oslo_vmware.api [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Task: {'id': task-2054043, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2905.044406] env[62684]: DEBUG oslo_vmware.api [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Task: {'id': task-2054043, 'name': ReconfigVM_Task, 'duration_secs': 0.122904} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2905.044750] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421452', 'volume_id': '918c1ac0-fd46-4303-b416-1e4b1db78a3b', 'name': 'volume-918c1ac0-fd46-4303-b416-1e4b1db78a3b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '435a33ca-05df-404e-8b98-a62dece47eba', 'attached_at': '', 'detached_at': '', 'volume_id': '918c1ac0-fd46-4303-b416-1e4b1db78a3b', 'serial': '918c1ac0-fd46-4303-b416-1e4b1db78a3b'} {{(pid=62684) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2905.045289] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c944f77d-6419-4a53-87c5-b3c66c8dd9b1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2905.051101] env[62684]: DEBUG oslo_vmware.api [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Waiting for the task: (returnval){ [ 2905.051101] env[62684]: value = "task-2054044" [ 2905.051101] env[62684]: _type = "Task" [ 2905.051101] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2905.058847] env[62684]: DEBUG oslo_vmware.api [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Task: {'id': task-2054044, 'name': Rename_Task} progress is 5%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2905.561369] env[62684]: DEBUG oslo_vmware.api [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Task: {'id': task-2054044, 'name': Rename_Task, 'duration_secs': 0.117736} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2905.561728] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Powering on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2905.561878] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5f2c73f3-d72b-4d1a-9227-80af5e024106 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2905.567350] env[62684]: DEBUG oslo_vmware.api [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Waiting for the task: (returnval){ [ 2905.567350] env[62684]: value = "task-2054045" [ 2905.567350] env[62684]: _type = "Task" [ 2905.567350] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2905.574328] env[62684]: DEBUG oslo_vmware.api [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Task: {'id': task-2054045, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2906.077349] env[62684]: DEBUG oslo_vmware.api [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Task: {'id': task-2054045, 'name': PowerOnVM_Task, 'duration_secs': 0.414696} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2906.077624] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Powered on the VM {{(pid=62684) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2906.077835] env[62684]: INFO nova.compute.manager [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Took 3.46 seconds to spawn the instance on the hypervisor. [ 2906.078034] env[62684]: DEBUG nova.compute.manager [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2906.078792] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-249f2152-5763-4d08-8190-8d860171b9c3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2906.594405] env[62684]: INFO nova.compute.manager [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Took 9.81 seconds to build instance. 
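Note on the task-polling pattern above: every vCenter mutation in this trace (CreateVM_Task, RelocateVM_Task for the root volume, the two ReconfigVM_Task calls, Rename_Task, PowerOnVM_Task) returns a task handle that oslo.vmware polls until it reports success, which is why the log alternates "Waiting for the task", "progress is N%" and "completed successfully". The following is a minimal, self-contained sketch of that poll-until-done loop, not the oslo.vmware implementation; fetch_task_info, TaskFailed and the state names are illustrative stand-ins.

    # Minimal sketch (not oslo.vmware) of the poll-until-done pattern shown in
    # the log. `fetch_task_info` is a hypothetical stand-in for the property
    # read that reports task state/progress.
    import time

    class TaskFailed(Exception):
        """Raised when the polled task ends in an error state."""

    def wait_for_task(fetch_task_info, task_id, poll_interval=0.5, timeout=300.0):
        """Poll fetch_task_info(task_id) until the task succeeds or errors.

        fetch_task_info must return a dict with a 'state' key
        ('running', 'success' or 'error') and optionally 'progress'/'result'.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info(task_id)
            state = info.get("state")
            if state == "success":
                return info.get("result")
            if state == "error":
                raise TaskFailed(info.get("error", "unknown task error"))
            # Still running: wait and poll again, matching the repeated
            # "progress is N%" entries in the log.
            time.sleep(poll_interval)
        raise TimeoutError(f"task {task_id} did not complete in {timeout}s")

    if __name__ == "__main__":
        # Fake task source that completes on the third poll.
        states = iter([{"state": "running", "progress": 0},
                       {"state": "running", "progress": 5},
                       {"state": "success", "result": "vm-421452"}])
        print(wait_for_task(lambda _tid: next(states), "task-2054040",
                            poll_interval=0.01))
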
[ 2907.096122] env[62684]: DEBUG oslo_concurrency.lockutils [None req-17d0822c-452b-4eab-a299-a78ce7b8c365 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Lock "435a33ca-05df-404e-8b98-a62dece47eba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.317s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2908.044655] env[62684]: DEBUG nova.compute.manager [req-77d69766-0961-41a4-8681-da199fa593a7 req-44318785-2a88-4457-bb00-c89e4c9d5497 service nova] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Received event network-changed-9b62de64-d50c-4dde-943b-8b68e671b61c {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2908.044944] env[62684]: DEBUG nova.compute.manager [req-77d69766-0961-41a4-8681-da199fa593a7 req-44318785-2a88-4457-bb00-c89e4c9d5497 service nova] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Refreshing instance network info cache due to event network-changed-9b62de64-d50c-4dde-943b-8b68e671b61c. {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2908.045116] env[62684]: DEBUG oslo_concurrency.lockutils [req-77d69766-0961-41a4-8681-da199fa593a7 req-44318785-2a88-4457-bb00-c89e4c9d5497 service nova] Acquiring lock "refresh_cache-435a33ca-05df-404e-8b98-a62dece47eba" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2908.045358] env[62684]: DEBUG oslo_concurrency.lockutils [req-77d69766-0961-41a4-8681-da199fa593a7 req-44318785-2a88-4457-bb00-c89e4c9d5497 service nova] Acquired lock "refresh_cache-435a33ca-05df-404e-8b98-a62dece47eba" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2908.045485] env[62684]: DEBUG nova.network.neutron [req-77d69766-0961-41a4-8681-da199fa593a7 req-44318785-2a88-4457-bb00-c89e4c9d5497 service nova] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Refreshing network info cache for port 9b62de64-d50c-4dde-943b-8b68e671b61c {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2908.785108] env[62684]: DEBUG nova.network.neutron [req-77d69766-0961-41a4-8681-da199fa593a7 req-44318785-2a88-4457-bb00-c89e4c9d5497 service nova] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Updated VIF entry in instance network info cache for port 9b62de64-d50c-4dde-943b-8b68e671b61c. 
{{(pid=62684) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2908.785518] env[62684]: DEBUG nova.network.neutron [req-77d69766-0961-41a4-8681-da199fa593a7 req-44318785-2a88-4457-bb00-c89e4c9d5497 service nova] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Updating instance_info_cache with network_info: [{"id": "9b62de64-d50c-4dde-943b-8b68e671b61c", "address": "fa:16:3e:55:7d:fa", "network": {"id": "62056c7a-56ed-44de-b713-cc7a05fd5072", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1112290977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "326156690fbe48ac93781ce4603e5ccb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bdf594e-da7a-4254-b413-87aef4614588", "external-id": "nsx-vlan-transportzone-422", "segmentation_id": 422, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b62de64-d5", "ovs_interfaceid": "9b62de64-d50c-4dde-943b-8b68e671b61c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2909.288905] env[62684]: DEBUG oslo_concurrency.lockutils [req-77d69766-0961-41a4-8681-da199fa593a7 req-44318785-2a88-4457-bb00-c89e4c9d5497 service nova] Releasing lock "refresh_cache-435a33ca-05df-404e-8b98-a62dece47eba" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2925.300361] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2925.803273] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2925.803527] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2925.806018] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2925.806018] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Auditing locally available compute resources for cpu-1 (node: 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62684) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2925.806018] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85100a31-e59b-4550-b158-097556e3ec66 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2925.813603] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad42abf-0dec-4856-b64a-684905ed876e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2925.827302] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35779597-9c6b-4dac-a583-41ab5489e046 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2925.833437] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e937331-2e19-4f99-97af-ccccc698e808 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2925.861314] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181344MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=62684) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2925.861629] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2925.861932] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2926.886274] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Instance 435a33ca-05df-404e-8b98-a62dece47eba actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62684) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2926.886856] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2926.887165] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62684) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2926.911246] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad80f42-0c74-42b2-8c96-3282e21945bd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2926.918756] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d31e3ac-9799-4c6b-81a8-bbc7f084d4aa {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2926.948192] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e97baf2e-635a-4edb-9229-15d55f4a1c8a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2926.955049] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e8f051-6c94-4ab7-9838-68fe1c6674f3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2926.967663] env[62684]: DEBUG nova.compute.provider_tree [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2927.471017] env[62684]: DEBUG nova.scheduler.client.report [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2927.975894] env[62684]: DEBUG nova.compute.resource_tracker [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62684) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2927.976318] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.114s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2932.977325] env[62684]: DEBUG oslo_service.periodic_task [None 
req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2932.977794] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Starting heal instance info cache {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2932.977794] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Rebuilding the list of instances to heal {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2933.547794] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "refresh_cache-435a33ca-05df-404e-8b98-a62dece47eba" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2933.547976] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "refresh_cache-435a33ca-05df-404e-8b98-a62dece47eba" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2933.548156] env[62684]: DEBUG nova.network.neutron [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Forcefully refreshing network info cache for instance {{(pid=62684) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2933.548315] env[62684]: DEBUG nova.objects.instance [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lazy-loading 'info_cache' on Instance uuid 435a33ca-05df-404e-8b98-a62dece47eba {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2935.262406] env[62684]: DEBUG nova.network.neutron [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Updating instance_info_cache with network_info: [{"id": "9b62de64-d50c-4dde-943b-8b68e671b61c", "address": "fa:16:3e:55:7d:fa", "network": {"id": "62056c7a-56ed-44de-b713-cc7a05fd5072", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1112290977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "326156690fbe48ac93781ce4603e5ccb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bdf594e-da7a-4254-b413-87aef4614588", "external-id": "nsx-vlan-transportzone-422", "segmentation_id": 422, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b62de64-d5", "ovs_interfaceid": "9b62de64-d50c-4dde-943b-8b68e671b61c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2935.765258] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock 
"refresh_cache-435a33ca-05df-404e-8b98-a62dece47eba" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2935.765444] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Updated the network info_cache for instance {{(pid=62684) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2935.765614] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2935.765780] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2935.765955] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2935.766129] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2935.766279] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2935.766407] env[62684]: DEBUG nova.compute.manager [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62684) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2939.086023] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2939.300995] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2945.590652] env[62684]: INFO nova.compute.manager [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Rebuilding instance [ 2945.634315] env[62684]: DEBUG nova.compute.manager [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Checking state {{(pid=62684) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2945.635233] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee8dabf9-8b6f-4bec-ba1e-3b4240e53689 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2946.145892] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2946.146229] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a479fd5b-e970-4d04-8fc5-05c1092ac098 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2946.154302] env[62684]: DEBUG oslo_vmware.api [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Waiting for the task: (returnval){ [ 2946.154302] env[62684]: value = "task-2054046" [ 2946.154302] env[62684]: _type = "Task" [ 2946.154302] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2946.161908] env[62684]: DEBUG oslo_vmware.api [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Task: {'id': task-2054046, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2946.664309] env[62684]: DEBUG oslo_vmware.api [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Task: {'id': task-2054046, 'name': PowerOffVM_Task, 'duration_secs': 0.16522} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2946.664618] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Powered off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2946.665265] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Powering off the VM {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2946.665520] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-486df301-8662-407f-a80b-408cb3eac322 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2946.671525] env[62684]: DEBUG oslo_vmware.api [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Waiting for the task: (returnval){ [ 2946.671525] env[62684]: value = "task-2054047" [ 2946.671525] env[62684]: _type = "Task" [ 2946.671525] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2946.682031] env[62684]: DEBUG nova.virt.vmwareapi.vm_util [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] VM already powered off {{(pid=62684) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2946.682236] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Volume detach. 
Driver type: vmdk {{(pid=62684) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2946.682434] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421452', 'volume_id': '918c1ac0-fd46-4303-b416-1e4b1db78a3b', 'name': 'volume-918c1ac0-fd46-4303-b416-1e4b1db78a3b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '435a33ca-05df-404e-8b98-a62dece47eba', 'attached_at': '', 'detached_at': '', 'volume_id': '918c1ac0-fd46-4303-b416-1e4b1db78a3b', 'serial': '918c1ac0-fd46-4303-b416-1e4b1db78a3b'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2946.683354] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b73d61-d9d2-4c12-9a24-ad63913c2f5b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2946.704865] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-351ae3c4-70e4-4f92-84a9-42b87d887b00 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2946.711264] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63dde6b2-5898-4673-b87e-75cf497d29ec {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2946.727882] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d75a80-fca3-4de7-9828-6ee10a60e8a1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2946.741708] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] The volume has not been displaced from its original location: [datastore1] volume-918c1ac0-fd46-4303-b416-1e4b1db78a3b/volume-918c1ac0-fd46-4303-b416-1e4b1db78a3b.vmdk. No consolidation needed. 
{{(pid=62684) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2946.748995] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Reconfiguring VM instance instance-0000007d to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2946.749347] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6cb99170-7daa-49b2-b5be-2930a2640407 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2946.766345] env[62684]: DEBUG oslo_vmware.api [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Waiting for the task: (returnval){ [ 2946.766345] env[62684]: value = "task-2054048" [ 2946.766345] env[62684]: _type = "Task" [ 2946.766345] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2946.776805] env[62684]: DEBUG oslo_vmware.api [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Task: {'id': task-2054048, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2947.276368] env[62684]: DEBUG oslo_vmware.api [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Task: {'id': task-2054048, 'name': ReconfigVM_Task, 'duration_secs': 0.153535} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2947.276646] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Reconfigured VM instance instance-0000007d to detach disk 2000 {{(pid=62684) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2947.281202] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca9f43f6-ce96-4d0b-aa27-796aa6c00f91 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2947.295491] env[62684]: DEBUG oslo_vmware.api [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Waiting for the task: (returnval){ [ 2947.295491] env[62684]: value = "task-2054049" [ 2947.295491] env[62684]: _type = "Task" [ 2947.295491] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2947.302997] env[62684]: DEBUG oslo_vmware.api [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Task: {'id': task-2054049, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2947.805230] env[62684]: DEBUG oslo_vmware.api [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Task: {'id': task-2054049, 'name': ReconfigVM_Task, 'duration_secs': 0.12357} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2947.805559] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-421452', 'volume_id': '918c1ac0-fd46-4303-b416-1e4b1db78a3b', 'name': 'volume-918c1ac0-fd46-4303-b416-1e4b1db78a3b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '435a33ca-05df-404e-8b98-a62dece47eba', 'attached_at': '', 'detached_at': '', 'volume_id': '918c1ac0-fd46-4303-b416-1e4b1db78a3b', 'serial': '918c1ac0-fd46-4303-b416-1e4b1db78a3b'} {{(pid=62684) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2947.805830] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2947.806580] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bf81892-d18a-4567-a0fb-33400ea15937 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2947.812915] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Unregistering the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2947.813153] env[62684]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c45076fb-0253-4736-b39f-f6263f185ce2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2947.918433] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Unregistered the VM {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2947.918655] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Deleting contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2947.918847] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 
tempest-ServerActionsV293TestJSON-976133119-project-member] Deleting the datastore file [datastore1] 435a33ca-05df-404e-8b98-a62dece47eba {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2947.919139] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f517dc13-b0f9-4b3b-9b7c-78c8c7f8973b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2947.925428] env[62684]: DEBUG oslo_vmware.api [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Waiting for the task: (returnval){ [ 2947.925428] env[62684]: value = "task-2054051" [ 2947.925428] env[62684]: _type = "Task" [ 2947.925428] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2947.932970] env[62684]: DEBUG oslo_vmware.api [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Task: {'id': task-2054051, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2948.435792] env[62684]: DEBUG oslo_vmware.api [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Task: {'id': task-2054051, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076205} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2948.436075] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2948.436279] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Deleted contents of the VM from datastore datastore1 {{(pid=62684) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2948.436460] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2948.487657] env[62684]: DEBUG nova.virt.vmwareapi.volumeops [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Volume detach. 
Driver type: vmdk {{(pid=62684) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2948.488013] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9dccd9d8-3b7e-4a16-96f7-a294f51cb5b0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2948.496952] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edd2215b-9cac-4d8e-9415-98348d83ad29 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2948.521624] env[62684]: ERROR nova.compute.manager [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Failed to detach volume 918c1ac0-fd46-4303-b416-1e4b1db78a3b from /dev/sda: nova.exception.InstanceNotFound: Instance 435a33ca-05df-404e-8b98-a62dece47eba could not be found. [ 2948.521624] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Traceback (most recent call last): [ 2948.521624] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] File "/opt/stack/nova/nova/compute/manager.py", line 4143, in _do_rebuild_instance [ 2948.521624] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] self.driver.rebuild(**kwargs) [ 2948.521624] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild [ 2948.521624] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] raise NotImplementedError() [ 2948.521624] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] NotImplementedError [ 2948.521624] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] [ 2948.521624] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] During handling of the above exception, another exception occurred: [ 2948.521624] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] [ 2948.521624] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Traceback (most recent call last): [ 2948.521624] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] File "/opt/stack/nova/nova/compute/manager.py", line 3566, in _detach_root_volume [ 2948.521624] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] self.driver.detach_volume(context, old_connection_info, [ 2948.521624] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 559, in detach_volume [ 2948.521624] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] return self._volumeops.detach_volume(connection_info, instance) [ 2948.521624] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 2948.521624] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] self._detach_volume_vmdk(connection_info, instance) [ 2948.521624] env[62684]: ERROR nova.compute.manager 
[instance: 435a33ca-05df-404e-8b98-a62dece47eba] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 2948.521624] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 2948.521624] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 2948.521624] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] stable_ref.fetch_moref(session) [ 2948.521624] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 2948.521624] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] raise exception.InstanceNotFound(instance_id=self._uuid) [ 2948.521624] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] nova.exception.InstanceNotFound: Instance 435a33ca-05df-404e-8b98-a62dece47eba could not be found. [ 2948.521624] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] [ 2948.640816] env[62684]: DEBUG nova.compute.utils [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Build of instance 435a33ca-05df-404e-8b98-a62dece47eba aborted: Failed to rebuild volume backed instance. {{(pid=62684) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2948.643049] env[62684]: ERROR nova.compute.manager [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 435a33ca-05df-404e-8b98-a62dece47eba aborted: Failed to rebuild volume backed instance. 
[ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Traceback (most recent call last): [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] File "/opt/stack/nova/nova/compute/manager.py", line 4143, in _do_rebuild_instance [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] self.driver.rebuild(**kwargs) [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] raise NotImplementedError() [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] NotImplementedError [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] During handling of the above exception, another exception occurred: [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Traceback (most recent call last): [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] File "/opt/stack/nova/nova/compute/manager.py", line 3601, in _rebuild_volume_backed_instance [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] self._detach_root_volume(context, instance, root_bdm) [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] File "/opt/stack/nova/nova/compute/manager.py", line 3580, in _detach_root_volume [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] with excutils.save_and_reraise_exception(): [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] self.force_reraise() [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] raise self.value [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] File "/opt/stack/nova/nova/compute/manager.py", line 3566, in _detach_root_volume [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] self.driver.detach_volume(context, old_connection_info, [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 559, in detach_volume [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] return self._volumeops.detach_volume(connection_info, instance) [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] File 
"/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] self._detach_volume_vmdk(connection_info, instance) [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] stable_ref.fetch_moref(session) [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] raise exception.InstanceNotFound(instance_id=self._uuid) [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] nova.exception.InstanceNotFound: Instance 435a33ca-05df-404e-8b98-a62dece47eba could not be found. [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] During handling of the above exception, another exception occurred: [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Traceback (most recent call last): [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] File "/opt/stack/nova/nova/compute/manager.py", line 10866, in _error_out_instance_on_exception [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] yield [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] File "/opt/stack/nova/nova/compute/manager.py", line 3869, in rebuild_instance [ 2948.643049] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] self._do_rebuild_instance_with_claim( [ 2948.643999] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] File "/opt/stack/nova/nova/compute/manager.py", line 3955, in _do_rebuild_instance_with_claim [ 2948.643999] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] self._do_rebuild_instance( [ 2948.643999] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] File "/opt/stack/nova/nova/compute/manager.py", line 4147, in _do_rebuild_instance [ 2948.643999] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] self._rebuild_default_impl(**kwargs) [ 2948.643999] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] File "/opt/stack/nova/nova/compute/manager.py", line 3724, in _rebuild_default_impl [ 2948.643999] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] 
self._rebuild_volume_backed_instance( [ 2948.643999] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] File "/opt/stack/nova/nova/compute/manager.py", line 3616, in _rebuild_volume_backed_instance [ 2948.643999] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] raise exception.BuildAbortException( [ 2948.643999] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] nova.exception.BuildAbortException: Build of instance 435a33ca-05df-404e-8b98-a62dece47eba aborted: Failed to rebuild volume backed instance. [ 2948.643999] env[62684]: ERROR nova.compute.manager [instance: 435a33ca-05df-404e-8b98-a62dece47eba] [ 2950.658156] env[62684]: DEBUG oslo_concurrency.lockutils [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2950.658463] env[62684]: DEBUG oslo_concurrency.lockutils [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2950.672916] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d50e8579-202d-4ece-879a-ee69e4978e0b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2950.680349] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85ac2b67-7fd6-4c70-883b-9cf7e8488b68 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2950.710259] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4800cd60-a693-4b08-a084-c8983c088595 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2950.717046] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35f49823-d8ea-47c5-a7bf-8c1e95b510ee {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2950.729626] env[62684]: DEBUG nova.compute.provider_tree [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f {{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2951.232591] env[62684]: DEBUG nova.scheduler.client.report [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 
65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2951.433636] env[62684]: DEBUG oslo_concurrency.lockutils [None req-57553175-b9e8-412e-b61c-f7f86a9c579e tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Acquiring lock "435a33ca-05df-404e-8b98-a62dece47eba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2951.433917] env[62684]: DEBUG oslo_concurrency.lockutils [None req-57553175-b9e8-412e-b61c-f7f86a9c579e tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Lock "435a33ca-05df-404e-8b98-a62dece47eba" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2951.434199] env[62684]: DEBUG oslo_concurrency.lockutils [None req-57553175-b9e8-412e-b61c-f7f86a9c579e tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Acquiring lock "435a33ca-05df-404e-8b98-a62dece47eba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2951.434447] env[62684]: DEBUG oslo_concurrency.lockutils [None req-57553175-b9e8-412e-b61c-f7f86a9c579e tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Lock "435a33ca-05df-404e-8b98-a62dece47eba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2951.434633] env[62684]: DEBUG oslo_concurrency.lockutils [None req-57553175-b9e8-412e-b61c-f7f86a9c579e tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Lock "435a33ca-05df-404e-8b98-a62dece47eba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2951.436756] env[62684]: INFO nova.compute.manager [None req-57553175-b9e8-412e-b61c-f7f86a9c579e tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Terminating instance [ 2951.438668] env[62684]: DEBUG nova.compute.manager [None req-57553175-b9e8-412e-b61c-f7f86a9c579e tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Start destroying the instance on the hypervisor. 
{{(pid=62684) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2951.438960] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9ad749cb-2150-4acc-84f9-252a3ebe4397 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2951.448415] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef75e71-87a7-49af-a284-6ae7de4b16b9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2951.471289] env[62684]: WARNING nova.virt.vmwareapi.driver [None req-57553175-b9e8-412e-b61c-f7f86a9c579e tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 435a33ca-05df-404e-8b98-a62dece47eba could not be found. [ 2951.471489] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-57553175-b9e8-412e-b61c-f7f86a9c579e tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Destroying instance {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2951.471758] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-94e5d86a-1b77-44fd-80cd-b8f1a075faab {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2951.479662] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1a49058-0d6c-4110-ad87-a59924c2ccd1 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2951.501836] env[62684]: WARNING nova.virt.vmwareapi.vmops [None req-57553175-b9e8-412e-b61c-f7f86a9c579e tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 435a33ca-05df-404e-8b98-a62dece47eba could not be found. [ 2951.501990] env[62684]: DEBUG nova.virt.vmwareapi.vmops [None req-57553175-b9e8-412e-b61c-f7f86a9c579e tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Instance destroyed {{(pid=62684) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2951.502188] env[62684]: INFO nova.compute.manager [None req-57553175-b9e8-412e-b61c-f7f86a9c579e tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Took 0.06 seconds to destroy the instance on the hypervisor. [ 2951.502423] env[62684]: DEBUG oslo.service.loopingcall [None req-57553175-b9e8-412e-b61c-f7f86a9c579e tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62684) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2951.502630] env[62684]: DEBUG nova.compute.manager [-] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Deallocating network for instance {{(pid=62684) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2951.502726] env[62684]: DEBUG nova.network.neutron [-] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] deallocate_for_instance() {{(pid=62684) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2951.737418] env[62684]: DEBUG oslo_concurrency.lockutils [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.079s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2951.737736] env[62684]: INFO nova.compute.manager [None req-18297cce-7481-4fbb-a521-1bab0c238227 tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Successfully reverted task state from rebuilding on failure for instance. [ 2951.969960] env[62684]: DEBUG nova.compute.manager [req-34977103-fb94-4187-88f2-76bbf0d1bd18 req-ddf9c43e-b338-4b2f-84bb-6e2cac378c6c service nova] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Received event network-vif-deleted-9b62de64-d50c-4dde-943b-8b68e671b61c {{(pid=62684) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2951.970323] env[62684]: INFO nova.compute.manager [req-34977103-fb94-4187-88f2-76bbf0d1bd18 req-ddf9c43e-b338-4b2f-84bb-6e2cac378c6c service nova] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Neutron deleted interface 9b62de64-d50c-4dde-943b-8b68e671b61c; detaching it from the instance and deleting it from the info cache [ 2951.970533] env[62684]: DEBUG nova.network.neutron [req-34977103-fb94-4187-88f2-76bbf0d1bd18 req-ddf9c43e-b338-4b2f-84bb-6e2cac378c6c service nova] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2952.423719] env[62684]: DEBUG nova.network.neutron [-] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Updating instance_info_cache with network_info: [] {{(pid=62684) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2952.473436] env[62684]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2dbf153c-a329-4332-b0c4-2f01246990ce {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2952.482859] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7621b7d-b6d0-45f0-8563-24ec7ca484fd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2952.508546] env[62684]: DEBUG nova.compute.manager [req-34977103-fb94-4187-88f2-76bbf0d1bd18 req-ddf9c43e-b338-4b2f-84bb-6e2cac378c6c service nova] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Detach interface failed, port_id=9b62de64-d50c-4dde-943b-8b68e671b61c, reason: Instance 435a33ca-05df-404e-8b98-a62dece47eba could not be found. 
{{(pid=62684) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2952.927111] env[62684]: INFO nova.compute.manager [-] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Took 1.42 seconds to deallocate network for instance. [ 2953.473499] env[62684]: INFO nova.compute.manager [None req-57553175-b9e8-412e-b61c-f7f86a9c579e tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Took 0.55 seconds to detach 1 volumes for instance. [ 2953.476075] env[62684]: DEBUG nova.compute.manager [None req-57553175-b9e8-412e-b61c-f7f86a9c579e tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] [instance: 435a33ca-05df-404e-8b98-a62dece47eba] Deleting volume: 918c1ac0-fd46-4303-b416-1e4b1db78a3b {{(pid=62684) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 2954.021609] env[62684]: DEBUG oslo_concurrency.lockutils [None req-57553175-b9e8-412e-b61c-f7f86a9c579e tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2954.022069] env[62684]: DEBUG oslo_concurrency.lockutils [None req-57553175-b9e8-412e-b61c-f7f86a9c579e tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2954.022401] env[62684]: DEBUG nova.objects.instance [None req-57553175-b9e8-412e-b61c-f7f86a9c579e tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Lazy-loading 'resources' on Instance uuid 435a33ca-05df-404e-8b98-a62dece47eba {{(pid=62684) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2954.540602] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6db7ad9f-77a8-4d52-8ea3-af966654abd7 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2954.549174] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7507275-c34d-400b-8911-9afdf0eccbdd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2954.577579] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f2610c4-8398-4597-a17b-8bf2703d84ec {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2954.584255] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc887287-8984-4e25-8660-3531b370c1ea {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2954.596854] env[62684]: DEBUG nova.compute.provider_tree [None req-57553175-b9e8-412e-b61c-f7f86a9c579e tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Inventory has not changed in ProviderTree for provider: c23c281e-ec1f-4876-972e-a98655f2084f 
{{(pid=62684) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2955.099909] env[62684]: DEBUG nova.scheduler.client.report [None req-57553175-b9e8-412e-b61c-f7f86a9c579e tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Inventory has not changed for provider c23c281e-ec1f-4876-972e-a98655f2084f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62684) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2955.606523] env[62684]: DEBUG oslo_concurrency.lockutils [None req-57553175-b9e8-412e-b61c-f7f86a9c579e tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.584s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2956.126045] env[62684]: DEBUG oslo_concurrency.lockutils [None req-57553175-b9e8-412e-b61c-f7f86a9c579e tempest-ServerActionsV293TestJSON-976133119 tempest-ServerActionsV293TestJSON-976133119-project-member] Lock "435a33ca-05df-404e-8b98-a62dece47eba" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.692s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2956.300543] env[62684]: DEBUG oslo_service.periodic_task [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Running periodic task ComputeManager._run_image_cache_manager_pass {{(pid=62684) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2956.300852] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "storage-registry-lock" by "nova.virt.storage_users.register_storage_use..do_register_storage_use" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2956.301434] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "storage-registry-lock" acquired by "nova.virt.storage_users.register_storage_use..do_register_storage_use" :: waited 0.001s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2956.301737] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "storage-registry-lock" "released" by "nova.virt.storage_users.register_storage_use..do_register_storage_use" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2956.301879] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "storage-registry-lock" by "nova.virt.storage_users.get_storage_users..do_get_storage_users" {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2956.302159] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock 
"storage-registry-lock" acquired by "nova.virt.storage_users.get_storage_users..do_get_storage_users" :: waited 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2956.302408] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Lock "storage-registry-lock" "released" by "nova.virt.storage_users.get_storage_users..do_get_storage_users" :: held 0.000s {{(pid=62684) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2956.805728] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90fd5e8f-9eb4-4ada-9ce6-d958c4daf72b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2956.814139] env[62684]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aead6db-7731-46c9-b4d3-78d519a0e71f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2957.329684] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee762624-742a-4215-bf73-90463d029c74 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2957.335636] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Waiting for the task: (returnval){ [ 2957.335636] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5278110f-a8db-baef-abfb-7df2df545990" [ 2957.335636] env[62684]: _type = "Task" [ 2957.335636] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2957.343199] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5278110f-a8db-baef-abfb-7df2df545990, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2957.902549] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5278110f-a8db-baef-abfb-7df2df545990, 'name': SearchDatastore_Task, 'duration_secs': 0.221541} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2957.903097] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "[datastore1] devstack-image-cache_base/223b06b2-7e33-4368-80a9-86a9a6e92ed1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2957.903235] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "[datastore1] devstack-image-cache_base/223b06b2-7e33-4368-80a9-86a9a6e92ed1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2957.903565] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/223b06b2-7e33-4368-80a9-86a9a6e92ed1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2957.903997] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f19052d4-d7da-4160-a7fe-5785ed6bb254 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2957.908587] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Waiting for the task: (returnval){ [ 2957.908587] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c1b103-cacc-2290-35e9-5f8717219ffd" [ 2957.908587] env[62684]: _type = "Task" [ 2957.908587] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2957.916198] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c1b103-cacc-2290-35e9-5f8717219ffd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2958.417821] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52c1b103-cacc-2290-35e9-5f8717219ffd, 'name': SearchDatastore_Task, 'duration_secs': 0.010344} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2958.418116] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Creating directory with path [datastore1] devstack-image-cache_base/223b06b2-7e33-4368-80a9-86a9a6e92ed1/ts-2025-01-10-08-06-39 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2958.418392] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6f36fe88-a094-428c-9d73-27f7abfdbe5f {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2958.430514] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Created directory with path [datastore1] devstack-image-cache_base/223b06b2-7e33-4368-80a9-86a9a6e92ed1/ts-2025-01-10-08-06-39 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2958.430680] env[62684]: INFO nova.virt.vmwareapi.imagecache [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Image 223b06b2-7e33-4368-80a9-86a9a6e92ed1 is no longer used by this node. Pending deletion! [ 2958.430808] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "[datastore1] devstack-image-cache_base/223b06b2-7e33-4368-80a9-86a9a6e92ed1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2958.431016] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "[datastore1] devstack-image-cache_base/091db551-69cb-4fb9-87e3-557d41573f5e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2958.431142] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "[datastore1] devstack-image-cache_base/091db551-69cb-4fb9-87e3-557d41573f5e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2958.431436] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/091db551-69cb-4fb9-87e3-557d41573f5e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2958.431675] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4079350b-f1ea-456a-b520-3aaf2d4e6c01 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2958.436161] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Waiting for the task: (returnval){ [ 2958.436161] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523bbc6d-4ccc-b21d-87e5-9de825bc8314" [ 2958.436161] env[62684]: _type = "Task" [ 2958.436161] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2958.443360] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523bbc6d-4ccc-b21d-87e5-9de825bc8314, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2958.947417] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]523bbc6d-4ccc-b21d-87e5-9de825bc8314, 'name': SearchDatastore_Task, 'duration_secs': 0.01063} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2958.948325] env[62684]: INFO nova.virt.vmwareapi.imagecache [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Image [datastore1] devstack-image-cache_base/091db551-69cb-4fb9-87e3-557d41573f5e is no longer used. Deleting! [ 2958.948475] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/091db551-69cb-4fb9-87e3-557d41573f5e {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2958.948750] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-00ebb8f1-cd28-4621-88e4-cab801153d7b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2958.954127] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Waiting for the task: (returnval){ [ 2958.954127] env[62684]: value = "task-2054053" [ 2958.954127] env[62684]: _type = "Task" [ 2958.954127] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2958.961342] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': task-2054053, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2959.464130] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': task-2054053, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.115282} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2959.464472] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2959.464521] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "[datastore1] devstack-image-cache_base/091db551-69cb-4fb9-87e3-557d41573f5e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2959.464711] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "[datastore1] devstack-image-cache_base/87dfb7c8-5a2c-49ff-993b-2dde5b220c19" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2959.464823] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "[datastore1] devstack-image-cache_base/87dfb7c8-5a2c-49ff-993b-2dde5b220c19" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2959.465175] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/87dfb7c8-5a2c-49ff-993b-2dde5b220c19" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2959.465460] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-417f3d93-ece1-4308-8d10-96583c181e1c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2959.469975] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Waiting for the task: (returnval){ [ 2959.469975] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522fa18d-6726-d329-9c6d-79177a35348b" [ 2959.469975] env[62684]: _type = "Task" [ 2959.469975] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2959.478047] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522fa18d-6726-d329-9c6d-79177a35348b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2959.980542] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]522fa18d-6726-d329-9c6d-79177a35348b, 'name': SearchDatastore_Task, 'duration_secs': 0.009287} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2959.980793] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Creating directory with path [datastore1] devstack-image-cache_base/87dfb7c8-5a2c-49ff-993b-2dde5b220c19/ts-2025-01-10-08-06-40 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2959.981081] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8797ae2e-589c-4862-8b73-c276a86ddfc4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2959.994078] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Created directory with path [datastore1] devstack-image-cache_base/87dfb7c8-5a2c-49ff-993b-2dde5b220c19/ts-2025-01-10-08-06-40 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2959.994234] env[62684]: INFO nova.virt.vmwareapi.imagecache [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Image 87dfb7c8-5a2c-49ff-993b-2dde5b220c19 is no longer used by this node. Pending deletion! [ 2959.994419] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "[datastore1] devstack-image-cache_base/87dfb7c8-5a2c-49ff-993b-2dde5b220c19" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2959.994567] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "[datastore1] devstack-image-cache_base/c9875372-54d8-4078-8181-021c1a4f983a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2959.994679] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "[datastore1] devstack-image-cache_base/c9875372-54d8-4078-8181-021c1a4f983a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2959.995009] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9875372-54d8-4078-8181-021c1a4f983a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2959.995276] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19a3575f-d123-4aff-ae97-b8302ac567f6 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2959.999520] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Waiting for the task: (returnval){ [ 2959.999520] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52826c3c-160b-01d8-5c60-d5ba9faf030c" [ 2959.999520] env[62684]: _type = "Task" [ 2959.999520] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2960.006883] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52826c3c-160b-01d8-5c60-d5ba9faf030c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2960.510113] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52826c3c-160b-01d8-5c60-d5ba9faf030c, 'name': SearchDatastore_Task, 'duration_secs': 0.008123} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2960.510450] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Creating directory with path [datastore1] devstack-image-cache_base/c9875372-54d8-4078-8181-021c1a4f983a/ts-2025-01-10-08-06-41 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2960.510601] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5359ac41-e33a-4ddf-90dc-bd03c7c39d2c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2960.522961] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Created directory with path [datastore1] devstack-image-cache_base/c9875372-54d8-4078-8181-021c1a4f983a/ts-2025-01-10-08-06-41 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2960.523105] env[62684]: INFO nova.virt.vmwareapi.imagecache [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Image c9875372-54d8-4078-8181-021c1a4f983a is no longer used by this node. Pending deletion! [ 2960.523266] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "[datastore1] devstack-image-cache_base/c9875372-54d8-4078-8181-021c1a4f983a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2960.523475] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "[datastore1] devstack-image-cache_base/8337f75f-b270-4b19-8b09-2a31e8e43b6e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2960.523588] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "[datastore1] devstack-image-cache_base/8337f75f-b270-4b19-8b09-2a31e8e43b6e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2960.523852] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/8337f75f-b270-4b19-8b09-2a31e8e43b6e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2960.524125] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b2f8341-3d92-4ae4-9405-747aecd3bea4 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2960.528228] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Waiting for the task: (returnval){ [ 2960.528228] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5285199d-f97e-e6a7-5d04-5a428b0ebe0f" [ 2960.528228] env[62684]: _type = "Task" [ 2960.528228] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2960.535430] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5285199d-f97e-e6a7-5d04-5a428b0ebe0f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2961.037976] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5285199d-f97e-e6a7-5d04-5a428b0ebe0f, 'name': SearchDatastore_Task, 'duration_secs': 0.007975} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2961.038256] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Creating directory with path [datastore1] devstack-image-cache_base/8337f75f-b270-4b19-8b09-2a31e8e43b6e/ts-2025-01-10-08-06-41 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2961.038530] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f22dc09d-c448-4c51-be01-05b8b6648ebb {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2961.050458] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Created directory with path [datastore1] devstack-image-cache_base/8337f75f-b270-4b19-8b09-2a31e8e43b6e/ts-2025-01-10-08-06-41 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2961.050606] env[62684]: INFO nova.virt.vmwareapi.imagecache [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Image 8337f75f-b270-4b19-8b09-2a31e8e43b6e is no longer used by this node. Pending deletion! 
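Note: the nova.virt.vmwareapi.imagecache records above and below follow a two-phase aging scheme. For each image directory under [datastore1] devstack-image-cache_base the periodic task checks whether any instance still uses it; an unused image first gets a timestamp marker directory (the ts-YYYY-MM-DD-HH-MM-SS MakeDirectory calls, logged as "Pending deletion!"), and only an image whose marker has outlived the configured grace period is actually removed with DeleteDatastoreFile_Task (logged as "is no longer used. Deleting!", e.g. image 091db551-69cb-4fb9-87e3-557d41573f5e above). The sketch below is a minimal illustration of that scheme, not Nova's actual imagecache code; the cache object and its helpers (list_cached_images, is_in_use, get_marker, write_marker, clear_marker, delete_image) and the 24-hour grace period are all hypothetical.

# Minimal sketch of the two-phase image-cache aging visible in the log.
# Every helper on 'cache' is a hypothetical stand-in, not a real Nova API,
# and the 24-hour grace period is an assumption.
import datetime

GRACE = datetime.timedelta(hours=24)

def age_image_cache(cache, now=None):
    now = now or datetime.datetime.utcnow()
    for image_id in cache.list_cached_images():
        if cache.is_in_use(image_id):
            # Image still backs at least one VM: keep it and drop any marker.
            cache.clear_marker(image_id)
            continue
        marker = cache.get_marker(image_id)
        if marker is None:
            # Phase 1: create a ts-YYYY-MM-DD-HH-MM-SS marker directory
            # ("Pending deletion!" in the log).
            cache.write_marker(image_id, now.strftime('ts-%Y-%m-%d-%H-%M-%S'))
        elif now - marker.created_at > GRACE:
            # Phase 2: the marker outlived the grace period, so delete the
            # cached image ("is no longer used. Deleting!" followed by a
            # DeleteDatastoreFile_Task).
            cache.delete_image(image_id)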
[ 2961.050751] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "[datastore1] devstack-image-cache_base/8337f75f-b270-4b19-8b09-2a31e8e43b6e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2961.050955] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "[datastore1] devstack-image-cache_base/b8983a52-6b16-4416-b5c0-b336c3075123" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2961.051084] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "[datastore1] devstack-image-cache_base/b8983a52-6b16-4416-b5c0-b336c3075123" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2961.051403] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/b8983a52-6b16-4416-b5c0-b336c3075123" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2961.051653] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3340435b-39dc-4337-9be6-4b238e6f9b70 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2961.055472] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Waiting for the task: (returnval){ [ 2961.055472] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5289652b-893b-3be8-2188-93390fd16f2f" [ 2961.055472] env[62684]: _type = "Task" [ 2961.055472] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2961.062704] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5289652b-893b-3be8-2188-93390fd16f2f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2961.565701] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5289652b-893b-3be8-2188-93390fd16f2f, 'name': SearchDatastore_Task, 'duration_secs': 0.010628} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2961.565983] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Creating directory with path [datastore1] devstack-image-cache_base/b8983a52-6b16-4416-b5c0-b336c3075123/ts-2025-01-10-08-06-42 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2961.566241] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-66967c0a-cfd7-4161-9dc9-fa106224eecd {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2961.578750] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Created directory with path [datastore1] devstack-image-cache_base/b8983a52-6b16-4416-b5c0-b336c3075123/ts-2025-01-10-08-06-42 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2961.578883] env[62684]: INFO nova.virt.vmwareapi.imagecache [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Image b8983a52-6b16-4416-b5c0-b336c3075123 is no longer used by this node. Pending deletion! [ 2961.579060] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "[datastore1] devstack-image-cache_base/b8983a52-6b16-4416-b5c0-b336c3075123" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2961.579271] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "[datastore1] devstack-image-cache_base/5e13c96f-897e-4453-b568-016623145a51" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2961.579386] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "[datastore1] devstack-image-cache_base/5e13c96f-897e-4453-b568-016623145a51" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2961.579682] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/5e13c96f-897e-4453-b568-016623145a51" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2961.579913] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da70545c-dcb5-46c1-b7cb-724039451d9b {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2961.583784] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Waiting for the task: (returnval){ [ 2961.583784] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529ec165-9d50-1ba2-5e25-5aff5e69f82e" [ 2961.583784] env[62684]: _type = "Task" [ 2961.583784] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2961.591010] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529ec165-9d50-1ba2-5e25-5aff5e69f82e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2962.094174] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529ec165-9d50-1ba2-5e25-5aff5e69f82e, 'name': SearchDatastore_Task, 'duration_secs': 0.008492} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2962.094438] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Creating directory with path [datastore1] devstack-image-cache_base/5e13c96f-897e-4453-b568-016623145a51/ts-2025-01-10-08-06-42 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2962.094709] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ed49350a-5696-41da-a6b9-5683cd90dd90 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2962.106552] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Created directory with path [datastore1] devstack-image-cache_base/5e13c96f-897e-4453-b568-016623145a51/ts-2025-01-10-08-06-42 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2962.106687] env[62684]: INFO nova.virt.vmwareapi.imagecache [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Image 5e13c96f-897e-4453-b568-016623145a51 is no longer used by this node. Pending deletion! [ 2962.106845] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "[datastore1] devstack-image-cache_base/5e13c96f-897e-4453-b568-016623145a51" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2962.107100] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "[datastore1] devstack-image-cache_base/22a7e9ca-a5cc-474a-9835-9c1710c40b7e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2962.107191] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "[datastore1] devstack-image-cache_base/22a7e9ca-a5cc-474a-9835-9c1710c40b7e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2962.107484] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/22a7e9ca-a5cc-474a-9835-9c1710c40b7e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2962.107717] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a70a9b9-6e01-4461-8bb7-2010468452ed {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2962.112331] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Waiting for the task: (returnval){ [ 2962.112331] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a1443d-0fc0-7b29-efae-5cd2ee923adf" [ 2962.112331] env[62684]: _type = "Task" [ 2962.112331] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2962.119127] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a1443d-0fc0-7b29-efae-5cd2ee923adf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2962.623071] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a1443d-0fc0-7b29-efae-5cd2ee923adf, 'name': SearchDatastore_Task, 'duration_secs': 0.007776} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2962.623421] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Creating directory with path [datastore1] devstack-image-cache_base/22a7e9ca-a5cc-474a-9835-9c1710c40b7e/ts-2025-01-10-08-06-43 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2962.623576] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-47240b19-93a6-4aa8-927f-71373d2613ca {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2962.635022] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Created directory with path [datastore1] devstack-image-cache_base/22a7e9ca-a5cc-474a-9835-9c1710c40b7e/ts-2025-01-10-08-06-43 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2962.635169] env[62684]: INFO nova.virt.vmwareapi.imagecache [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Image 22a7e9ca-a5cc-474a-9835-9c1710c40b7e is no longer used by this node. Pending deletion! 
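The block above is one complete pass of the nova-compute image cache manager over a cached image that is still only "pending deletion": it takes the per-image lock, runs a SearchDatastore_Task to confirm the cache folder exists, then drops a ts-<timestamp> marker directory via FileManager.MakeDirectory and logs "Pending deletion!". A minimal Python sketch of that marking step follows; ts_dir_name() and the datastore.mkdir() helper are illustrative assumptions, not the actual nova.virt.vmwareapi.imagecache code.

# Hedged sketch of the "mark unused image for deletion" step seen in the log.
# `datastore` is a hypothetical helper object; its mkdir() stands in for the
# FileManager.MakeDirectory call issued through oslo.vmware.
from datetime import datetime, timezone

def ts_dir_name(now=None):
    # Marker folder name, matching the log's ts-2025-01-10-08-06-42 pattern.
    now = now or datetime.now(timezone.utc)
    return "ts-" + now.strftime("%Y-%m-%d-%H-%M-%S")

def mark_unused(datastore, cache_root, image_id, used_images):
    """Drop a ts- marker next to a cached image that no VM on this node uses."""
    if image_id in used_images:
        return False
    datastore.mkdir(f"{cache_root}/{image_id}/{ts_dir_name()}")
    return True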
[ 2962.635323] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "[datastore1] devstack-image-cache_base/22a7e9ca-a5cc-474a-9835-9c1710c40b7e" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2962.635531] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "[datastore1] devstack-image-cache_base/763c94f6-328d-447a-b903-bb766045e6ae" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2962.635655] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "[datastore1] devstack-image-cache_base/763c94f6-328d-447a-b903-bb766045e6ae" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2962.635947] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/763c94f6-328d-447a-b903-bb766045e6ae" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2962.636202] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79bc1ac6-70a7-4755-98e1-31d33ef9d05e {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2962.640167] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Waiting for the task: (returnval){ [ 2962.640167] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5268793a-5519-5e5b-2ca5-b36af657dbd9" [ 2962.640167] env[62684]: _type = "Task" [ 2962.640167] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2962.647595] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5268793a-5519-5e5b-2ca5-b36af657dbd9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2963.151104] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5268793a-5519-5e5b-2ca5-b36af657dbd9, 'name': SearchDatastore_Task, 'duration_secs': 0.007813} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2963.151443] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Creating directory with path [datastore1] devstack-image-cache_base/763c94f6-328d-447a-b903-bb766045e6ae/ts-2025-01-10-08-06-44 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2963.151758] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1415fe85-0c4b-4c64-8b68-95c42973ba4c {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2963.164079] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Created directory with path [datastore1] devstack-image-cache_base/763c94f6-328d-447a-b903-bb766045e6ae/ts-2025-01-10-08-06-44 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2963.164213] env[62684]: INFO nova.virt.vmwareapi.imagecache [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Image 763c94f6-328d-447a-b903-bb766045e6ae is no longer used by this node. Pending deletion! [ 2963.164375] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "[datastore1] devstack-image-cache_base/763c94f6-328d-447a-b903-bb766045e6ae" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2963.164582] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "[datastore1] devstack-image-cache_base/f0cfa2a6-883e-45e1-b375-4dcd9504b696" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2963.164717] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "[datastore1] devstack-image-cache_base/f0cfa2a6-883e-45e1-b375-4dcd9504b696" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2963.165046] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/f0cfa2a6-883e-45e1-b375-4dcd9504b696" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2963.165315] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e79a0cb-c79a-45be-a26e-c3f2b85086de {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2963.169912] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Waiting for the task: (returnval){ [ 2963.169912] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e267f9-7ec2-6c1c-7ca6-9756c55f7de6" [ 2963.169912] env[62684]: _type = "Task" [ 2963.169912] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2963.177337] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e267f9-7ec2-6c1c-7ca6-9756c55f7de6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2963.680107] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52e267f9-7ec2-6c1c-7ca6-9756c55f7de6, 'name': SearchDatastore_Task, 'duration_secs': 0.008572} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2963.680419] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Creating directory with path [datastore1] devstack-image-cache_base/f0cfa2a6-883e-45e1-b375-4dcd9504b696/ts-2025-01-10-08-06-44 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2963.680636] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f120a73b-2a05-4674-9fcc-c3198901e2a5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2963.692392] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Created directory with path [datastore1] devstack-image-cache_base/f0cfa2a6-883e-45e1-b375-4dcd9504b696/ts-2025-01-10-08-06-44 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2963.692554] env[62684]: INFO nova.virt.vmwareapi.imagecache [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Image f0cfa2a6-883e-45e1-b375-4dcd9504b696 is no longer used by this node. Pending deletion! [ 2963.692677] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "[datastore1] devstack-image-cache_base/f0cfa2a6-883e-45e1-b375-4dcd9504b696" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2963.692883] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "[datastore1] devstack-image-cache_base/982d88ef-0928-426c-8c25-d063a6eeb91a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2963.692995] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "[datastore1] devstack-image-cache_base/982d88ef-0928-426c-8c25-d063a6eeb91a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2963.693311] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/982d88ef-0928-426c-8c25-d063a6eeb91a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2963.693560] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6f1d9cb-11de-4447-abc8-fb9bf695e19d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2963.697502] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Waiting for the task: (returnval){ [ 2963.697502] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52fa29c7-d35d-ccc9-fa6a-5e8b9456409b" [ 2963.697502] env[62684]: _type = "Task" [ 2963.697502] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2963.704565] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52fa29c7-d35d-ccc9-fa6a-5e8b9456409b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2964.207951] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52fa29c7-d35d-ccc9-fa6a-5e8b9456409b, 'name': SearchDatastore_Task, 'duration_secs': 0.007891} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2964.208238] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Creating directory with path [datastore1] devstack-image-cache_base/982d88ef-0928-426c-8c25-d063a6eeb91a/ts-2025-01-10-08-06-45 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2964.208534] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4c61ead5-eae2-437e-b465-fe5634edd97d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2964.221306] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Created directory with path [datastore1] devstack-image-cache_base/982d88ef-0928-426c-8c25-d063a6eeb91a/ts-2025-01-10-08-06-45 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2964.221441] env[62684]: INFO nova.virt.vmwareapi.imagecache [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Image 982d88ef-0928-426c-8c25-d063a6eeb91a is no longer used by this node. Pending deletion! 
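Each of these cycles is wrapped in an oslo.concurrency lock whose name is the datastore path of the cached image ("[datastore1] devstack-image-cache_base/<image-id>"), so only one request at a time can age or delete a given image. A simplified sketch of that serialization, assuming the standard lockutils.lock() context manager; the "external semaphore" bookkeeping visible in the log is handled inside lockutils and is not reproduced here.

# Simplified per-image serialization; not the driver's actual code path.
from oslo_concurrency import lockutils

def run_with_image_lock(datastore_ref, image_id, fn):
    # Lock name mirrors the log, e.g. "[datastore1] devstack-image-cache_base/<id>".
    name = f"[{datastore_ref}] devstack-image-cache_base/{image_id}"
    with lockutils.lock(name):
        return fn()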
[ 2964.221594] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "[datastore1] devstack-image-cache_base/982d88ef-0928-426c-8c25-d063a6eeb91a" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2964.221801] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "[datastore1] devstack-image-cache_base/e866b2cc-a390-44e7-bb08-1bf9ef73b97c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2964.221912] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "[datastore1] devstack-image-cache_base/e866b2cc-a390-44e7-bb08-1bf9ef73b97c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2964.222242] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/e866b2cc-a390-44e7-bb08-1bf9ef73b97c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2964.222502] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6008997-c640-4c55-8ff6-420eb334c901 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2964.226676] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Waiting for the task: (returnval){ [ 2964.226676] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529483f7-6384-b09f-2d3f-e976ff911c0e" [ 2964.226676] env[62684]: _type = "Task" [ 2964.226676] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2964.234121] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529483f7-6384-b09f-2d3f-e976ff911c0e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2964.736609] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529483f7-6384-b09f-2d3f-e976ff911c0e, 'name': SearchDatastore_Task, 'duration_secs': 0.009231} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2964.737027] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Creating directory with path [datastore1] devstack-image-cache_base/e866b2cc-a390-44e7-bb08-1bf9ef73b97c/ts-2025-01-10-08-06-45 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2964.737191] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3bb3b95c-3966-4679-9b5a-8838a793cbab {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2964.749400] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Created directory with path [datastore1] devstack-image-cache_base/e866b2cc-a390-44e7-bb08-1bf9ef73b97c/ts-2025-01-10-08-06-45 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2964.749536] env[62684]: INFO nova.virt.vmwareapi.imagecache [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Image e866b2cc-a390-44e7-bb08-1bf9ef73b97c is no longer used by this node. Pending deletion! [ 2964.749691] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "[datastore1] devstack-image-cache_base/e866b2cc-a390-44e7-bb08-1bf9ef73b97c" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2964.749898] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "[datastore1] devstack-image-cache_base/017a863d-fd69-48d7-b76e-f0ef35837499" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2964.750016] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "[datastore1] devstack-image-cache_base/017a863d-fd69-48d7-b76e-f0ef35837499" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2964.750321] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/017a863d-fd69-48d7-b76e-f0ef35837499" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2964.750564] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f407bee-ac66-4b08-b30d-728d8c1c7514 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2964.754521] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Waiting for the task: (returnval){ [ 2964.754521] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5266b618-3ac2-bdc3-e627-b9804a1e2818" [ 2964.754521] env[62684]: _type = "Task" [ 2964.754521] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2964.762654] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5266b618-3ac2-bdc3-e627-b9804a1e2818, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2965.265058] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5266b618-3ac2-bdc3-e627-b9804a1e2818, 'name': SearchDatastore_Task, 'duration_secs': 0.008759} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2965.265329] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Creating directory with path [datastore1] devstack-image-cache_base/017a863d-fd69-48d7-b76e-f0ef35837499/ts-2025-01-10-08-06-46 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2965.265601] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17af1607-be5b-4af1-9d71-f56e67e88edc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2965.277974] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Created directory with path [datastore1] devstack-image-cache_base/017a863d-fd69-48d7-b76e-f0ef35837499/ts-2025-01-10-08-06-46 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2965.278124] env[62684]: INFO nova.virt.vmwareapi.imagecache [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Image 017a863d-fd69-48d7-b76e-f0ef35837499 is no longer used by this node. Pending deletion! [ 2965.278288] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "[datastore1] devstack-image-cache_base/017a863d-fd69-48d7-b76e-f0ef35837499" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2965.278493] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "[datastore1] devstack-image-cache_base/a61a91fe-7523-4ce3-ad13-bb533ab906e1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2965.278606] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "[datastore1] devstack-image-cache_base/a61a91fe-7523-4ce3-ad13-bb533ab906e1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2965.278910] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/a61a91fe-7523-4ce3-ad13-bb533ab906e1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2965.279181] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffc92a33-20da-449c-a6e5-1caa1bca42ff {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2965.283465] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Waiting for the task: (returnval){ [ 2965.283465] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521e8e25-efc3-f0d8-d37b-b4fde07c77d1" [ 2965.283465] env[62684]: _type = "Task" [ 2965.283465] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2965.291164] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521e8e25-efc3-f0d8-d37b-b4fde07c77d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2965.794217] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]521e8e25-efc3-f0d8-d37b-b4fde07c77d1, 'name': SearchDatastore_Task, 'duration_secs': 0.008685} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2965.794547] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Creating directory with path [datastore1] devstack-image-cache_base/a61a91fe-7523-4ce3-ad13-bb533ab906e1/ts-2025-01-10-08-06-46 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2965.794736] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6308b4cd-4d7d-4e63-b663-7b75cef637e9 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2965.807764] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Created directory with path [datastore1] devstack-image-cache_base/a61a91fe-7523-4ce3-ad13-bb533ab906e1/ts-2025-01-10-08-06-46 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2965.807902] env[62684]: INFO nova.virt.vmwareapi.imagecache [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Image a61a91fe-7523-4ce3-ad13-bb533ab906e1 is no longer used by this node. Pending deletion! 
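Every SearchDatastore_Task, MakeDirectory and DeleteDatastoreFile call above returns a vCenter task object that the driver polls until completion, which is why each request produces a "progress is 0%" line followed by "completed successfully" with a duration_secs value. A generic polling loop in the same spirit; get_task_state(), the poll interval and the timeout are assumptions for illustration, not the oslo.vmware internals.

import time

def wait_for_task(get_task_state, poll_interval=0.5, timeout=300):
    """Poll a vCenter-style task until it reaches a terminal state."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, info = get_task_state()  # e.g. ("running", "0%") or ("success", 0.0086)
        if state == "success":
            return info
        if state == "error":
            raise RuntimeError(f"task failed: {info}")
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")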
[ 2965.808061] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "[datastore1] devstack-image-cache_base/a61a91fe-7523-4ce3-ad13-bb533ab906e1" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2965.808306] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "[datastore1] devstack-image-cache_base/f8e16436-21f3-4c18-94ee-c87b40e211f3" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2965.808420] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "[datastore1] devstack-image-cache_base/f8e16436-21f3-4c18-94ee-c87b40e211f3" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2965.808718] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/f8e16436-21f3-4c18-94ee-c87b40e211f3" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2965.808959] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c97c0935-9717-4b18-bfc3-a238c595ddc5 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2965.812898] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Waiting for the task: (returnval){ [ 2965.812898] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bfe442-7ba5-6cc6-c555-dd93fc97c782" [ 2965.812898] env[62684]: _type = "Task" [ 2965.812898] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2965.820116] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bfe442-7ba5-6cc6-c555-dd93fc97c782, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2966.323911] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bfe442-7ba5-6cc6-c555-dd93fc97c782, 'name': SearchDatastore_Task, 'duration_secs': 0.008202} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2966.324194] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Creating directory with path [datastore1] devstack-image-cache_base/f8e16436-21f3-4c18-94ee-c87b40e211f3/ts-2025-01-10-08-06-47 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2966.324471] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9a049b83-3143-419c-aade-c66fbb9016a0 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2966.336901] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Created directory with path [datastore1] devstack-image-cache_base/f8e16436-21f3-4c18-94ee-c87b40e211f3/ts-2025-01-10-08-06-47 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2966.337053] env[62684]: INFO nova.virt.vmwareapi.imagecache [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Image f8e16436-21f3-4c18-94ee-c87b40e211f3 is no longer used by this node. Pending deletion! [ 2966.337242] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "[datastore1] devstack-image-cache_base/f8e16436-21f3-4c18-94ee-c87b40e211f3" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2966.337453] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "[datastore1] devstack-image-cache_base/498d0eba-5111-45af-b8ef-266073f48eea" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2966.337569] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "[datastore1] devstack-image-cache_base/498d0eba-5111-45af-b8ef-266073f48eea" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2966.337863] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/498d0eba-5111-45af-b8ef-266073f48eea" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2966.338158] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a00c462f-71b8-4ef7-a2f3-b6568de491fc {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2966.342375] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Waiting for the task: (returnval){ [ 2966.342375] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520e638e-91d6-a5c3-5124-2a2751700d6f" [ 2966.342375] env[62684]: _type = "Task" [ 2966.342375] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2966.349829] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520e638e-91d6-a5c3-5124-2a2751700d6f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2966.853421] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]520e638e-91d6-a5c3-5124-2a2751700d6f, 'name': SearchDatastore_Task, 'duration_secs': 0.007941} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2966.853813] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Creating directory with path [datastore1] devstack-image-cache_base/498d0eba-5111-45af-b8ef-266073f48eea/ts-2025-01-10-08-06-47 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2966.853983] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5be41b4f-12fc-49af-bce4-5e44f40ecf9d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2966.866815] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Created directory with path [datastore1] devstack-image-cache_base/498d0eba-5111-45af-b8ef-266073f48eea/ts-2025-01-10-08-06-47 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2966.866941] env[62684]: INFO nova.virt.vmwareapi.imagecache [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Image 498d0eba-5111-45af-b8ef-266073f48eea is no longer used by this node. Pending deletion! [ 2966.867150] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "[datastore1] devstack-image-cache_base/498d0eba-5111-45af-b8ef-266073f48eea" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2966.867364] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2966.867483] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2966.867820] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2966.868119] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e987d5f-28c4-4c67-b14c-c12eac91b92a {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2966.872520] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Waiting for the task: (returnval){ [ 2966.872520] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5205f1e0-7181-20ab-a146-511f637009c0" [ 2966.872520] env[62684]: _type = "Task" [ 2966.872520] env[62684]: } to complete. 
{{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2966.880449] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5205f1e0-7181-20ab-a146-511f637009c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2967.383730] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5205f1e0-7181-20ab-a146-511f637009c0, 'name': SearchDatastore_Task, 'duration_secs': 0.009331} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2967.384070] env[62684]: INFO nova.virt.vmwareapi.imagecache [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Image [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9 is no longer used. Deleting! [ 2967.384224] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2967.384533] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2dab016-9326-470e-9c72-74f9b55fee75 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.390491] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Waiting for the task: (returnval){ [ 2967.390491] env[62684]: value = "task-2054054" [ 2967.390491] env[62684]: _type = "Task" [ 2967.390491] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2967.399136] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': task-2054054, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2967.900931] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': task-2054054, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101804} completed successfully. 
{{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2967.901447] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2967.901513] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2967.901855] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "[datastore1] devstack-image-cache_base/c9b0f77d-fd99-47ab-a4a9-32a2df054ef7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2967.902062] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "[datastore1] devstack-image-cache_base/c9b0f77d-fd99-47ab-a4a9-32a2df054ef7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2967.902463] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9b0f77d-fd99-47ab-a4a9-32a2df054ef7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2967.902814] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c60d625b-489c-40a3-8f13-e4534bfcf916 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.907734] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Waiting for the task: (returnval){ [ 2967.907734] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a865f5-ff5c-20be-5526-46b1ae64b7f0" [ 2967.907734] env[62684]: _type = "Task" [ 2967.907734] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2967.915479] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a865f5-ff5c-20be-5526-46b1ae64b7f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2968.419025] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52a865f5-ff5c-20be-5526-46b1ae64b7f0, 'name': SearchDatastore_Task, 'duration_secs': 0.010573} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2968.419324] env[62684]: INFO nova.virt.vmwareapi.imagecache [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Image [datastore1] devstack-image-cache_base/c9b0f77d-fd99-47ab-a4a9-32a2df054ef7 is no longer used. Deleting! 
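The "is no longer used. Deleting!" entries mark the second phase: an image whose ts- marker has outlived the retention window has its whole cache folder removed with FileManager.DeleteDatastoreFile_Task, and that task is polled like any other. A hedged sketch of the age-out decision follows; parse_ts() and datastore.delete_file() are illustrative stand-ins, and the 24-hour window is only an example of nova's configurable minimum age.

from datetime import datetime, timedelta, timezone

def parse_ts(marker_name):
    # Marker folders look like "ts-2025-01-10-08-06-47".
    return datetime.strptime(marker_name[3:], "%Y-%m-%d-%H-%M-%S").replace(tzinfo=timezone.utc)

def maybe_delete(datastore, image_dir, marker_name, max_age=timedelta(hours=24)):
    """Delete a cached image folder once its ts- marker is older than max_age."""
    if datetime.now(timezone.utc) - parse_ts(marker_name) > max_age:
        datastore.delete_file(image_dir)  # stands in for DeleteDatastoreFile_Task
        return True
    return False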
[ 2968.419468] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/c9b0f77d-fd99-47ab-a4a9-32a2df054ef7 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2968.419751] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e5dce165-7323-4f3a-aa20-10e390eded7d {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2968.426080] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Waiting for the task: (returnval){ [ 2968.426080] env[62684]: value = "task-2054055" [ 2968.426080] env[62684]: _type = "Task" [ 2968.426080] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2968.434103] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': task-2054055, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2968.936092] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': task-2054055, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.108269} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2968.936464] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2968.936518] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "[datastore1] devstack-image-cache_base/c9b0f77d-fd99-47ab-a4a9-32a2df054ef7" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2968.936945] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "[datastore1] devstack-image-cache_base/42cbc7df-0c6d-4749-80dd-8ed5341f4fe8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2968.936945] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "[datastore1] devstack-image-cache_base/42cbc7df-0c6d-4749-80dd-8ed5341f4fe8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2968.937148] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/42cbc7df-0c6d-4749-80dd-8ed5341f4fe8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2968.937447] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a001df75-b404-4a6e-9d2d-9489ab52b5d2 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2968.942021] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Waiting for the task: (returnval){ [ 2968.942021] env[62684]: value = 
"session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529a746d-e676-d8bc-e21f-8ad62a3b5560" [ 2968.942021] env[62684]: _type = "Task" [ 2968.942021] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2968.950263] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529a746d-e676-d8bc-e21f-8ad62a3b5560, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2969.453963] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]529a746d-e676-d8bc-e21f-8ad62a3b5560, 'name': SearchDatastore_Task, 'duration_secs': 0.009838} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2969.454376] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Creating directory with path [datastore1] devstack-image-cache_base/42cbc7df-0c6d-4749-80dd-8ed5341f4fe8/ts-2025-01-10-08-06-50 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2969.454732] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eb4c95c6-b3ec-491c-aa39-9539fa30f888 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2969.466479] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Created directory with path [datastore1] devstack-image-cache_base/42cbc7df-0c6d-4749-80dd-8ed5341f4fe8/ts-2025-01-10-08-06-50 {{(pid=62684) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2969.466638] env[62684]: INFO nova.virt.vmwareapi.imagecache [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Image 42cbc7df-0c6d-4749-80dd-8ed5341f4fe8 is no longer used by this node. Pending deletion! 
[ 2969.466856] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "[datastore1] devstack-image-cache_base/42cbc7df-0c6d-4749-80dd-8ed5341f4fe8" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2969.467179] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "[datastore1] devstack-image-cache_base/422b850b-0204-49b2-b917-fbbcd3c7d832" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2969.467354] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "[datastore1] devstack-image-cache_base/422b850b-0204-49b2-b917-fbbcd3c7d832" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2969.467743] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/422b850b-0204-49b2-b917-fbbcd3c7d832" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2969.468076] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7b128d4-d434-401b-8666-de69a9862de3 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2969.472509] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Waiting for the task: (returnval){ [ 2969.472509] env[62684]: value = "session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5251d9e7-98b1-a726-346f-4349d76b4acb" [ 2969.472509] env[62684]: _type = "Task" [ 2969.472509] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2969.481156] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5251d9e7-98b1-a726-346f-4349d76b4acb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2969.983769] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]5251d9e7-98b1-a726-346f-4349d76b4acb, 'name': SearchDatastore_Task, 'duration_secs': 0.008886} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2969.984164] env[62684]: INFO nova.virt.vmwareapi.imagecache [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Image [datastore1] devstack-image-cache_base/422b850b-0204-49b2-b917-fbbcd3c7d832 is no longer used. Deleting! 
[ 2969.984217] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/422b850b-0204-49b2-b917-fbbcd3c7d832 {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2969.984506] env[62684]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7c2dbb00-a891-40e0-a0a2-e656d5972cdf {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2969.991472] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Waiting for the task: (returnval){ [ 2969.991472] env[62684]: value = "task-2054056" [ 2969.991472] env[62684]: _type = "Task" [ 2969.991472] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2969.998876] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': task-2054056, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2970.501472] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': task-2054056, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.103392} completed successfully. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2970.501693] env[62684]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Deleted the datastore file {{(pid=62684) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2970.501865] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Releasing lock "[datastore1] devstack-image-cache_base/422b850b-0204-49b2-b917-fbbcd3c7d832" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2970.502115] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquiring lock "[datastore1] devstack-image-cache_base/3189ab76-9829-4da9-bd5f-cefda10b0ab0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2970.502240] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired lock "[datastore1] devstack-image-cache_base/3189ab76-9829-4da9-bd5f-cefda10b0ab0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2970.502567] env[62684]: DEBUG oslo_concurrency.lockutils [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/3189ab76-9829-4da9-bd5f-cefda10b0ab0" {{(pid=62684) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2970.502858] env[62684]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8db1cf3-0446-4b4b-b94a-724b840c7554 {{(pid=62684) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2970.507216] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Waiting for the task: (returnval){ [ 2970.507216] env[62684]: value = 
"session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bc12de-092f-6073-36b3-d9a551c86048" [ 2970.507216] env[62684]: _type = "Task" [ 2970.507216] env[62684]: } to complete. {{(pid=62684) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2970.514243] env[62684]: DEBUG oslo_vmware.api [None req-cc9d7ebe-6498-417c-afa3-2ecd3f8038d1 None None] Task: {'id': session[52c40fe6-c9e6-68a7-2d33-47e8ab5160ac]52bc12de-092f-6073-36b3-d9a551c86048, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62684) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}